Compare commits

2253 Commits

SHA1 Message Date
fc9dd3487d 2025-10-26 nightly release (cdb60e44eb528bf02c6bb2d7e384298283e755ca) 2025-10-26 07:34:09 +00:00
af360b15da 2025-10-25 nightly release (b31bad1b8f1331bf43d47f46602cf6141db56844) 2025-10-25 07:34:15 +00:00
e5cabbf5f8 2025-10-24 nightly release (c12293dcbea3d9e9c5fade8e4c838f8a57c300e7) 2025-10-24 07:34:52 +00:00
958cc1742e 2025-10-23 nightly release (fb277a59167031883cc42249ed8fe869f39bd5ed) 2025-10-23 07:35:16 +00:00
72d577ba18 2025-10-22 nightly release (5211f4c1088f564cb15146e41bc592b7cf1824af) 2025-10-22 07:35:41 +00:00
94549de0a5 2025-10-21 nightly release (0e083942ccd98c5a2d9386f0b2c4bbc901be40ef) 2025-10-21 07:35:38 +00:00
427bf59f34 2025-10-20 nightly release (61d9a5180e44e43c2fe0394916688d756ea99da2) 2025-10-20 07:35:28 +00:00
f2b29280ef 2025-10-19 nightly release (c4f6619330bdac5bf4addb9070ecb42994202e1f) 2025-10-19 07:34:02 +00:00
92ae61a1ed 2025-10-18 nightly release (f510d0dbc0108a90c4b0275eb761bf189ff7a7d2) 2025-10-18 18:52:37 +00:00
411066f7cf 2025-10-18 nightly release (fe80f03726a7a50439be063327b67c7fba6279b2) 2025-10-18 07:34:05 +00:00
79a37055e7 2025-10-17 nightly release (556fc09a9f67f24ca5591ec049c5d0c347c5f62a) 2025-10-17 07:34:42 +00:00
0dfcb1a118 2025-10-16 nightly release (003dd130730993eedc302f769b7b653016ab6450) 2025-10-16 07:35:10 +00:00
0fec8d0539 2025-10-15 nightly release (e6f766c7d750d40603eee3f66c5915bac606b3ea) 2025-10-15 07:35:42 +00:00
89cf1adaf0 2025-10-14 nightly release (3a110c9bb209ccd690986d1593b44d261c1174a5) 2025-10-14 07:35:19 +00:00
40c3219a9e 2025-10-13 nightly release (3a110c9bb209ccd690986d1593b44d261c1174a5) 2025-10-13 07:35:26 +00:00
8d7cc00898 2025-10-12 nightly release (4f8a986b8feb4a171b8a68a2a3664275ec54a75f) 2025-10-12 07:33:57 +00:00
41aa60b096 2025-10-11 nightly release (12d7cc5cd3da00094c5801aff4c77550e2a59528) 2025-10-11 07:34:06 +00:00
a96cef0b4d 2025-10-10 nightly release (a57a14868dcfd9dabf9bd19b6b11f31967c80c87) 2025-10-10 07:34:21 +00:00
7d06d03434 2025-10-09 nightly release (7a1ead755f2e2abe8be49a7a0fb88b6b13973147) 2025-10-09 07:34:26 +00:00
f1efb72466 2025-10-08 nightly release (608792153f42254d2d2b5a87d524807a0c2724f1) 2025-10-08 07:34:27 +00:00
8fab07c5ca 2025-10-07 nightly release (bcd96cc6ff798281e66aabef6ce72542fdc97c7a) 2025-10-07 07:34:18 +00:00
39cdb9bef4 2025-10-06 nightly release (9fff8155c362da777e7ce31b85fb2dc7cfced2d5) 2025-10-06 07:34:40 +00:00
b8d25c1f42 2025-10-05 nightly release (cf0a00d4f38775e5a82a166e367f40383c606963) 2025-10-05 07:34:00 +00:00
cf9d09490c 2025-10-04 nightly release (9d1ab4f4bb508a72c7f549f0b5219c4601944ba1) 2025-10-04 07:33:56 +00:00
d1da98fe75 2025-10-03 nightly release (d1cbb74fb16406488a174832e1b58b7c242f418d) 2025-10-03 07:34:23 +00:00
d3d23546a3 2025-10-02 nightly release (144378615a5a2b347e39c6376cba7d75f7a82926) 2025-10-02 07:34:04 +00:00
8d091f3460 2025-10-01 nightly release (e30f01b5b569c0a6043774ee096d9d0bca231c93) 2025-10-01 07:35:14 +00:00
23acabb207 2025-09-30 nightly release (0b0ed6fd335dddccaf1b7212a785808c38cf98a1) 2025-09-30 07:35:13 +00:00
e86844ce4c 2025-09-29 nightly release (352197c50886c5a3b5b5e7ab86eba52548354d5c) 2025-09-29 07:35:06 +00:00
a5b10a3c27 2025-09-28 nightly release (3059b080128a0d13760f5e3ed9bf226c8c1a3ba5) 2025-09-28 07:33:49 +00:00
48f35f8940 2025-09-27 nightly release (9dac6437da12ef636139b7ae515c10617ffed528) 2025-09-27 07:33:53 +00:00
f64285495c 2025-09-26 nightly release (67cc0e0ac9fa009cbe6bd4d7852b39a579e92282) 2025-09-26 07:34:55 +00:00
337ce5ab49 2025-09-25 nightly release (ad2f7315ca66b42497047bb7951f696b50f1e81b) 2025-09-25 07:34:40 +00:00
5e8f2b0e21 2025-09-24 nightly release (d746b987d8239595438b9dbd0f96ec6a4fed4653) 2025-09-24 07:34:48 +00:00
6ba37938af 2025-09-23 nightly release (8f30a8dc47d61b0110f5797cad97040b1a58e6a0) 2025-09-23 07:35:01 +00:00
3ac0051187 2025-09-22 nightly release (0b59492853b7347ead6b71f52c76e7ac2836ea27) 2025-09-22 07:35:17 +00:00
e77b99230a 2025-09-21 nightly release (97eb7a281dab3d0454d72da1c72d4993a0b59a43) 2025-09-21 07:33:53 +00:00
d60c6e4a06 2025-09-20 nightly release (a31acf32bd18e115df910002aef42baf7a9b4a33) 2025-09-20 07:34:03 +00:00
2f25275aa3 2025-09-19 nightly release (04842ac2b013daf3029145d785b3805dfd3129a3) 2025-09-19 07:34:05 +00:00
a701395da4 2025-09-18 nightly release (13304401dfaab91a5f6311a09e77ed71914d6639) 2025-09-18 07:34:29 +00:00
5281945d57 2025-09-17 nightly release (65845d72917fc27cd89a88b067e7c8f44bc0c987) 2025-09-17 07:34:37 +00:00
f08d26d177 2025-09-16 nightly release (e900a274e5c7fd7f0c76a991ad182c635f807c83) 2025-09-16 07:34:24 +00:00
53a2908a10 2025-09-15 nightly release (76e5df3866f11712a3e6306bb0e5055c223c78f3) 2025-09-15 07:34:47 +00:00
8b8cd78b2b 2025-09-14 nightly release (f01bf0f64b2fb9a761d7a147f17a862561bc5baf) 2025-09-14 07:33:56 +00:00
27a4e36cf2 2025-09-13 nightly release (595e13feb71c10621271c30cb402906b6cc83e13) 2025-09-13 07:33:51 +00:00
616c50a126 2025-09-12 nightly release (ccb450b190a9c24fc53ca8f120bd1cdf36c312c2) 2025-09-12 07:33:59 +00:00
79440d8bcc 2025-09-11 nightly release (f654cff5663c1972172f150f529a587fc3c0d2c1) 2025-09-11 07:34:23 +00:00
75e7f49f9c 2025-09-10 nightly release (484c4093a87a3e6767e55ed553f95db8fc137442) 2025-09-10 07:34:16 +00:00
1ff17af8ec 2025-09-09 nightly release (a965f0979307d2d3894f00420e6d901c50f89d7a) 2025-09-09 07:34:30 +00:00
6d74d91e42 2025-09-08 nightly release (5babb4d5c04b1ff7ed5f96f7aea1898cd4faef5a) 2025-09-08 07:34:34 +00:00
d110794e53 2025-09-07 nightly release (4d4abec80f03cd8fdefe1d9cb3a60d3690cd777e) 2025-09-07 07:33:56 +00:00
b35c908b70 2025-09-06 nightly release (4d4abec80f03cd8fdefe1d9cb3a60d3690cd777e) 2025-09-06 07:34:07 +00:00
e018da741d 2025-09-05 nightly release (b67c41039835bd9b20b83cd6233e86baaa5f5dde) 2025-09-05 07:34:20 +00:00
6f6ce1a7ab 2025-09-04 nightly release (d636c181f9140a7b59be10b36eae23039fc2bb72) 2025-09-04 07:33:59 +00:00
aa0545ff8e 2025-09-03 nightly release (6737e2c996990024187ba620d2764f3b6f6add2c) 2025-09-03 07:34:25 +00:00
a0e3ca21ab 2025-09-02 nightly release (6737e2c996990024187ba620d2764f3b6f6add2c) 2025-09-02 07:35:23 +00:00
4ee895974d 2025-09-01 nightly release (9a665ca3c472384e9d722bddba79e5a7680f1abd) 2025-09-01 07:36:00 +00:00
2afb96c3e1 2025-08-31 nightly release (ad7b748686610e317e5c0cbbd523b7a6e3b8b51f) 2025-08-31 07:33:52 +00:00
5661512350 2025-08-30 nightly release (f6368e934e6bef84211f7db82c22e3623038e43f) 2025-08-30 07:33:55 +00:00
8296ac99be 2025-08-29 nightly release (d153af713e8bf825534b55d29fd7cfef6a3a5071) 2025-08-29 07:34:44 +00:00
e74f214c33 2025-08-28 nightly release (e9d34b2438d65d6d16109e2416f3698de20f85c2) 2025-08-28 07:34:51 +00:00
c6897fdd85 2025-08-27 nightly release (cd87f3029582cedb3b88747a3bd7d200b05c1138) 2025-08-27 07:34:42 +00:00
95dc3256f1 2025-08-26 nightly release (e34b6a01039df5d8940acdccd8d8989f3cd827aa) 2025-08-26 07:35:53 +00:00
dcc2abce2d 2025-08-25 nightly release (726dce3c944cbda16e54d3b15cdb4b6ced05af72) 2025-08-25 07:35:46 +00:00
7ad0c2909e 2025-08-24 nightly release (3e5b021f217a42ae55dc690083f67a28126808ed) 2025-08-24 07:34:16 +00:00
f853a0f43e 2025-08-23 nightly release (c2390087c34c964ef648addf43efb8c6a34e30c2) 2025-08-23 07:34:17 +00:00
d9a80aff4c 2025-08-22 nightly release (cc2b65a91ae7773d4ecf9a600dda48fc3e69aa8f) 2025-08-22 07:34:51 +00:00
22b0a727eb 2025-08-21 nightly release (39862acb2e320783245d2a03acfd1b14cae28038) 2025-08-21 07:36:29 +00:00
7956a1d1d0 2025-08-20 nightly release (2b62ef74208792c7c4bf923f872e54b5f384efc8) 2025-08-20 07:35:54 +00:00
0ce7e98ca9 2025-08-19 nightly release (58f9a3dd6391397e439c5f5075837e8f983735aa) 2025-08-19 07:35:31 +00:00
1fb85cc366 2025-08-18 nightly release (3ced4f1e6cb37d3470dcc540892dee08a6019cf8) 2025-08-18 07:37:41 +00:00
194cdb4509 2025-08-17 nightly release (8f434545c2e48c858d8b0d06db8f9642d6a87ad0) 2025-08-17 07:34:55 +00:00
32d1befbfe 2025-08-16 nightly release (b74c7cd335180a8ebcd5284f2909a73c9a87aa33) 2025-08-16 07:34:40 +00:00
0b5972dede 2025-08-15 nightly release (5b9ad951f8195865e13a44fe09a78bf95973f2fa) 2025-08-15 07:36:16 +00:00
a82d678c97 2025-08-14 nightly release (3faee0a6318afcbbbb48687009a459214910d820) 2025-08-14 07:36:46 +00:00
b81cea13b4 2025-08-13 nightly release (a354fa91e26b376d96385a2206c5ff5b42aa4600) 2025-08-13 07:37:16 +00:00
916ef1939b 2025-08-12 nightly release (bfc873d02ec413344717493e4175a902921359fd) 2025-08-12 07:36:51 +00:00
113daf02cb 2025-08-11 nightly release (e7152ff8a6a929a0db7f3f4a72a5b6d471769cd3) 2025-08-11 07:39:07 +00:00
dd1b1d0d2b 2025-08-10 nightly release (01f66d08d93365015f4af005a252f439c4d4013a) 2025-08-10 07:35:25 +00:00
86d99efdc4 2025-08-09 nightly release (5ed4f9177907fe403ec4c4499d0d0e9be6b68fcf) 2025-08-09 07:35:19 +00:00
d9796cf6b9 2025-08-08 nightly release (3fcd79e023da7156ac584992ebab29205d3b7881) 2025-08-08 07:39:27 +00:00
f9acb53318 2025-08-07 nightly release (2507ae63f293354170695fd20a5c5ce5f64e323d) 2025-08-07 07:39:26 +00:00
95ba6271fc 2025-08-06 nightly release (74a754aae98aabc2aca67e5edb41cc684fae9a82) 2025-08-06 07:39:45 +00:00
77bc90913a 2025-08-05 nightly release (efc4b460b3789d97b87d775f86c5f4ee8dfb5629) 2025-08-05 07:39:49 +00:00
af79a3aadc 2025-08-04 nightly release (978e3a91421e82fc95b34e75efd6324e3e89e755) 2025-08-04 07:41:32 +00:00
6f9445d860 2025-08-03 nightly release (38895c0ac26e4a18a6be0f88c3ec0c96f8077852) 2025-08-03 07:36:11 +00:00
8d0646ae44 2025-08-02 nightly release (e57a92734da09b96add98bc06693b62122c5df72) 2025-08-02 07:35:54 +00:00
b4f42c92a0 2025-08-01 nightly release (1ebcba4e1b1a5f20324cc94c2b92f260a0c1c0ab) 2025-08-01 07:39:37 +00:00
bb53f04097 2025-07-31 nightly release (3e5e0946152ca3500c0491927c26724f7f8ac244) 2025-07-31 07:38:40 +00:00
6e2a16b899 2025-07-30 nightly release (badd0618e4d9488e48f500664084356041cd6d44) 2025-07-30 07:39:50 +00:00
2ae9c24a31 2025-07-29 nightly release (08ea8fccaf28155466dbc1da203219941f4b08c0) 2025-07-29 07:38:58 +00:00
fa58508f47 2025-07-28 nightly release (f63673626d4c6a574a49f9e0d1131764d46b2bb8) 2025-07-28 07:39:22 +00:00
0669d84e19 2025-07-27 nightly release (d72ebefe3fa7d3ee0e9c9b399f5c07611e790664) 2025-07-27 07:35:54 +00:00
0f22b01329 2025-07-26 nightly release (c6b479bc09090336dcd1c97f5e368e76490065ea) 2025-07-26 07:36:02 +00:00
fa05753590 2025-07-25 nightly release (6fc0ad22f0a07b6f38d138861c56a765d5a9bb02) 2025-07-25 07:38:10 +00:00
e4d06f14ac 2025-07-24 nightly release (febf3c475e6fe369b41ef009f3598659a6df0911) 2025-07-24 07:38:13 +00:00
add37ba086 2025-07-23 nightly release (2dccff7dcf56b0d168ebfd7ca08bdeca37273c56) 2025-07-23 07:38:56 +00:00
5ec79a9787 2025-07-22 nightly release (350d6af52c76481d0f386208b6b86be93b7ff22d) 2025-07-22 07:38:13 +00:00
d59923f01d 2025-07-21 nightly release (70b4a8880b1c3fb5e92c5fcd75bda6b6f299abac) 2025-07-21 07:40:42 +00:00
1c2e48b67a 2025-07-20 nightly release (4869f7117009fb99a57482fce56b00c6163fbce6) 2025-07-20 07:35:32 +00:00
d78a49992c 2025-07-19 nightly release (90b082e207bff79dd09d89cfef9be49de5c2ad83) 2025-07-19 07:35:34 +00:00
744d29186e 2025-07-18 nightly release (89d842fec5229fff0df5342b2db121368d51e717) 2025-07-18 07:37:56 +00:00
9e5df57ebb 2025-07-17 nightly release (f6d138807f138868de0397936e2bee482c1fb987) 2025-07-17 07:37:40 +00:00
ab43fe4bdf 2025-07-16 nightly release (03852ddc22350eb8b6ed6b61777639ce6080f3dc) 2025-07-16 07:37:49 +00:00
d38be5ebdb 2025-07-15 nightly release (9345279c6ebdbad95b7b53bc2cb6f63a4e57b2cc) 2025-07-15 07:38:13 +00:00
90f1e7bed1 2025-07-14 nightly release (1f57e0e04da9d334e238cec346f7ae3667bed9d1) 2025-07-14 07:38:24 +00:00
8f2a1f3416 2025-07-13 nightly release (5aee022d8b2bc9d31ddaf877315ffb8ad9d62985) 2025-07-13 07:36:19 +00:00
987acea60a 2025-07-12 nightly release (4ff9b7fa3116b1c429e577830ac6e816734ad029) 2025-07-12 07:35:43 +00:00
87b2ac5d19 2025-07-11 nightly release (ae86e8f6c829a3cfa9204949156fce2d048c919b) 2025-07-11 07:37:34 +00:00
1b995e9218 2025-07-10 nightly release (3232b57cd87fbd15c990fbf87d181716a1993a55) 2025-07-10 07:36:45 +00:00
1c10a682ee 2025-07-09 nightly release (1b3d69b59f92383633731aada8383ab88da3ed60) 2025-07-09 07:36:52 +00:00
c5b9696986 2025-07-08 nightly release (12f9942b107acc9d7acf9591818c826ef972a0f5) 2025-07-08 07:36:53 +00:00
d9bca3a299 2025-07-07 nightly release (d26ca5de058dbcf56ac52bb43e84dd98df2ace97) 2025-07-07 07:36:53 +00:00
e067016da8 2025-07-06 nightly release (7cda4017ddda554752e89069ae205be5e8388f59) 2025-07-06 07:34:39 +00:00
d9c875e47b 2025-07-05 nightly release (f7127b9b940a98596599acda1f89fc5153635a5d) 2025-07-05 07:34:44 +00:00
522bab616b 2025-07-04 nightly release (a6fab82b16011213cb010c8c50461b9a680748a2) 2025-07-04 07:37:22 +00:00
76578d0c40 2025-07-03 nightly release (5cc4e856fda4accb2e9291527693a91dc2a18d89) 2025-07-03 07:36:27 +00:00
dd43031b67 2025-07-02 nightly release (4500a4aa50141ed30e093ef8491b30d1d1287348) 2025-07-02 07:36:13 +00:00
65d9261836 2025-07-01 nightly release (c174f3a6a55864cedb8f6d9014e9b8cadf91186b) 2025-07-01 07:37:29 +00:00
249addeefd 2025-06-30 nightly release (771be857043bf794cd219a9b925e308e31f12314) 2025-06-30 07:37:04 +00:00
fcd40af2df 2025-06-29 nightly release (bccb8473fed94dbc6f1392d0c5b4a51150ee4a9a) 2025-06-29 07:34:58 +00:00
6b21a0c286 2025-06-28 nightly release (feea575082439a0496dd404a4925b7d551039065) 2025-06-28 07:34:50 +00:00
3cce5f1d04 2025-06-27 nightly release (382c6190c1329e96e71eef21a19737a3eda0040b) 2025-06-27 07:36:18 +00:00
0fb413a39a 2025-06-26 nightly release (85df746892d9b0e87e7a5dfa78ef81a84aec6de0) 2025-06-26 07:36:28 +00:00
d9c0ee5520 2025-06-25 nightly release (9b498d3bb28b8e3411ce464dd2755c5b96d92c8f) 2025-06-25 07:36:29 +00:00
fbea094c72 2025-06-24 nightly release (c82a174ceab79f77ba18405dd263eb03692608fd) 2025-06-24 07:36:32 +00:00
c8569f3a23 2025-06-23 nightly release (c55eef79f8880e4b610a0ca6f6131e690dc948dd) 2025-06-23 07:37:05 +00:00
382411c3d8 2025-06-22 nightly release (1d993fa3092e4f0b5745f2470024b35cac96da14) 2025-06-22 07:35:28 +00:00
c3ffa66474 2025-06-21 nightly release (ac86ec0e60370c037e018137f2048cafd47c5c28) 2025-06-21 07:34:55 +00:00
a64e2e686a 2025-06-20 nightly release (f7a5ad6c2987ee5a83aa5d868cee3b8067d3de94) 2025-06-20 07:35:51 +00:00
5622038e20 2025-06-19 nightly release (2625c70aecc6eced1dbe108279feab7509733bef) 2025-06-19 07:36:07 +00:00
19e7984774 2025-06-18 nightly release (2625c70aecc6eced1dbe108279feab7509733bef) 2025-06-18 07:36:44 +00:00
d159881b7f 2025-06-17 nightly release (6e2992a9984f2c3f6469564008c7e45869b84678) 2025-06-17 07:37:08 +00:00
9ce4d95e84 2025-06-16 nightly release (c620d0b5c7e8679413d620624725471223ce8359) 2025-06-16 07:37:01 +00:00
74d0136772 2025-06-15 nightly release (6ebe9a4f47e9cd1c9ccd467bcdfdea9445fd98d6) 2025-06-15 07:34:46 +00:00
c3882619c2 2025-06-14 nightly release (a6084b71edb8d2856356724b5e71c4e2a861867f) 2025-06-14 07:34:19 +00:00
58a15e1c08 2025-06-13 nightly release (020da744370f6ee23e377357e9acc330b5610a67) 2025-06-13 07:35:55 +00:00
c07ea91237 2025-06-12 nightly release (b00b641ff1fbfa8f4f6152ffc631e0d24145a7a8) 2025-06-12 07:35:56 +00:00
3017faa76e 2025-06-11 nightly release (3040ca6d0f8558e39919b14eebeacc34ddf980f5) 2025-06-11 07:36:00 +00:00
0f61f5f414 2025-06-10 nightly release (3863bbb55b38985c7d64c8a0be7beb2005a9cc07) 2025-06-10 07:36:35 +00:00
724da4eecc 2025-06-09 nightly release (be2ad70cfa1360da5c23a04ff6ca3480fa02f278) 2025-06-09 07:36:29 +00:00
843156205e 2025-06-08 nightly release (f6e18bc1054624bb148632a85c10371d6cc62492) 2025-06-08 07:34:24 +00:00
e9e2e1992d 2025-06-07 nightly release (83d22256f84232c5440b25a08459c649a32b9a4f) 2025-06-07 07:34:11 +00:00
975b4d9707 2025-06-06 nightly release (e895e9689c625cbcd8f46880115e0d093713fa37) 2025-06-06 07:35:46 +00:00
d53869579d 2025-06-05 nightly release (a01bb9da14ad08853c7b0c1b9719b978cdbf66bc) 2025-06-05 07:35:55 +00:00
a6aa36e428 2025-06-04 nightly release (3e57de1251b879b595fba1d0b9e5eeac732d2137) 2025-06-04 07:36:01 +00:00
534604db0b 2025-06-03 nightly release (28cb3c0fe5dec58c595617066acd8bd082aa867e) 2025-06-03 07:36:30 +00:00
64247892a0 2025-06-02 nightly release (0d0058d90de410cbc998089eb5e475776d2ad55d) 2025-06-02 07:36:51 +00:00
b864161782 2025-06-01 nightly release (5616fa4a68718ead203314a3467f7dd9547153ae) 2025-06-01 07:34:50 +00:00
4612dbdec6 2025-05-31 nightly release (1193bf085574f5f0836b6ca7d84e3c8cb1f2d0c3) 2025-05-31 07:34:30 +00:00
5f945e8ea7 2025-05-30 nightly release (0c6c7780d9563b555d8b2948a005170044142537) 2025-05-30 07:35:17 +00:00
b122c5cdee 2025-05-29 nightly release (b394c6e89c2f7986274e405ec8f91c12fa52b5e2) 2025-05-29 07:35:25 +00:00
f2ad693453 2025-05-28 nightly release (54f1f29fedd3321f2398b6b34c82cc6355d7ba56) 2025-05-28 07:35:52 +00:00
b40585022f 2025-05-27 nightly release (f8010e7b934ab5f289a9d0f92168476882d497d4) 2025-05-27 07:35:58 +00:00
b5ad205e0e 2025-05-26 nightly release (8c16d0e4047a8ac5885baf52e8779fb3e36f2987) 2025-05-26 07:42:06 +00:00
3560b8ebe9 2025-05-25 nightly release (53ecb8159aa28b3c015917acaa89604cfae0d2c6) 2025-05-25 07:35:03 +00:00
fc4fe48f43 2025-05-24 nightly release (76ed9db4682c2ca75116de1547bfc298fafd132d) 2025-05-24 07:34:21 +00:00
4d514e404f 2025-05-23 nightly release (c1055f41a67ef9e76626fa14eea38073f4a09b62) 2025-05-23 07:35:40 +00:00
5f5ca60410 2025-05-22 nightly release (d1fe198df60cac55c70ad4eb3f8b70f8a556b4c1) 2025-05-22 07:35:29 +00:00
08ff525349 2025-05-21 nightly release (531d8f5fb6ea6e1fcac011e5af09067303839750) 2025-05-21 07:35:43 +00:00
9e910e020f 2025-05-20 nightly release (5ef90e14a3e4b26ec9247e5af03de493f20e79a7) 2025-05-20 07:37:18 +00:00
3b875c25ea 2025-05-19 nightly release (2ade886412fcf80b6f681ba660e6f9743489d492) 2025-05-19 07:35:50 +00:00
42f44e6d06 2025-05-18 nightly release (8568dbce1d7775d37c3f2dcc4073d3f44968fc43) 2025-05-18 07:34:25 +00:00
c44eb22680 2025-05-17 nightly release (084c4aa6140fa3e3ae66b09ffe893df841fa06da) 2025-05-17 07:34:07 +00:00
bab64901cc 2025-05-16 nightly release (d1f1ff8610d6baa9f9ce132fc082a10ffa2a9854) 2025-05-16 07:35:29 +00:00
a84c4fe2bb 2025-05-15 nightly release (014726d9d313fd6bf92cad6afb6933d2eb0cdf8a) 2025-05-15 07:35:28 +00:00
46a5746426 2025-05-14 nightly release (d759a517af3e6b2337bf8f8e0d1734e64e470f1b) 2025-05-14 07:35:30 +00:00
8b228c20ce 2025-05-13 nightly release (7243c69421cd0b868f3fa3b552c17e9c8b3023a1) 2025-05-13 07:35:48 +00:00
c8d468403b 2025-05-12 nightly release (032ef487258d8bbcae40fc580cdbf47398706aa4) 2025-05-12 07:35:34 +00:00
c3b3e52305 2025-05-11 nightly release (1f5cf19f56734aa92f1757b1afdbdbbdb198b807) 2025-05-11 07:34:04 +00:00
aff1eebd2b 2025-05-10 nightly release (180cbf46f2d2c2ceba608ad2722a6c9546e3fada) 2025-05-10 07:34:08 +00:00
8fc591edea 2025-05-09 nightly release (ab829ec6290b92b6f2c2519a9a922c1b06d0981a) 2025-05-09 07:34:56 +00:00
cd09210540 2025-05-08 nightly release (6f6fac6a416b64221031d32e6c1e4df145df45f5) 2025-05-08 07:35:58 +00:00
0ef95aa4f2 2025-05-07 nightly release (dfcfad2112933cc34247421ac0a4d3f19a1806c1) 2025-05-07 07:35:42 +00:00
02417607d1 2025-05-06 nightly release (1d7728056b9d9492930f56f7ad535bf927c7a4e8) 2025-05-06 07:35:40 +00:00
edcbbadc9a 2025-05-05 nightly release (0ffd31dc8a514d9ecb1dc653725e359ad37d8faa) 2025-05-05 07:36:04 +00:00
5ca255eb29 2025-05-04 nightly release (e889937850759fe69a8c7de6326984102ed9b088) 2025-05-04 07:33:51 +00:00
0217311e9a 2025-05-03 nightly release (457fa820ad538c7aeadb68f0ec418d63972ba1ee) 2025-05-03 07:33:51 +00:00
b4bd238d58 2025-05-02 nightly release (ce94b212c71c53656d8a357b6d3f2487f5c8d60c) 2025-05-02 07:34:41 +00:00
8c7f92885f 2025-05-01 nightly release (3849fd13de1a5ee727fd64351edb0d3b1bb637a9) 2025-05-01 07:34:40 +00:00
43be1858b1 2025-04-30 nightly release (e872bf8f888bdbb27a03e03935db61babf7180b8) 2025-04-30 07:34:46 +00:00
d2a7a65bf4 2025-04-29 nightly release (e5f4356a258d646df59aa350845c8ff913463ccf) 2025-04-29 07:35:38 +00:00
0c03652153 2025-04-28 nightly release (98bd2bd1abc7e4f18693a4bbe52f68b8d47019ac) 2025-04-28 07:47:12 +00:00
140c4197f8 2025-04-27 nightly release (a0d440a26a555c34e87b90bef3bff960b34bb180) 2025-04-27 07:33:53 +00:00
6a2d29f997 2025-04-26 nightly release (65b845f82b42e865e12c6a26dc6c749db0400973) 2025-04-26 07:34:01 +00:00
e0ba40b8b1 2025-04-25 nightly release (1a6d50d40799ba05b17254b236a6bf8817dd7d06) 2025-04-25 07:34:34 +00:00
0c4b0ec0c0 2025-04-24 nightly release (4ac2ee573d941e573b6648876bf215af609dccbe) 2025-04-24 07:35:07 +00:00
dab7e57003 2025-04-23 nightly release (cd021d048e4645d1bf9f193587ca0eda377a2721) 2025-04-23 07:37:06 +00:00
b182d84228 2025-04-22 nightly release (01f1cc44cbbfdf6307aa01b803a4ee22f9ade946) 2025-04-22 07:35:08 +00:00
6597b2807c 2025-04-21 nightly release (8eb21dffa9b1d0b55756ef94628f71bccfd5bbe9) 2025-04-21 07:34:47 +00:00
4b7863fc9f 2025-04-20 nightly release (a40e876b08277795a6552cf5e77e8649237c6812) 2025-04-20 07:34:08 +00:00
d3eb151b05 2025-04-19 nightly release (adf5f38eae0449b48a0356aa7f0e35c762525be5) 2025-04-19 07:33:54 +00:00
491e7e6ba7 2025-04-18 nightly release (3ed5f1fb77669c8ac5d02e7acc0218e31b71c0b6) 2025-04-18 07:35:16 +00:00
fcaf1a59e6 2025-04-17 nightly release (300e0ee13c08ef77e88f32204a2e0925c17ce216) 2025-04-17 07:35:32 +00:00
de772dae0a 2025-04-16 nightly release (3a90fd481e050cb73eff4e99ed16135b344baf69) 2025-04-16 07:34:40 +00:00
a58c844d04 2025-04-15 nightly release (e178a3aa9441a65aa6bfa079c2c23e515971fae6) 2025-04-15 07:36:12 +00:00
4fd036eb33 2025-04-14 nightly release (1a1a32ce5af880709a761c4cd9e9e43fb67e5058) 2025-04-14 07:34:56 +00:00
160a26e1a3 2025-04-13 nightly release (1a1a32ce5af880709a761c4cd9e9e43fb67e5058) 2025-04-13 07:33:47 +00:00
499ae6a4d4 2025-04-12 nightly release (1a1a32ce5af880709a761c4cd9e9e43fb67e5058) 2025-04-12 07:33:54 +00:00
a2033c7442 2025-04-11 nightly release (1a1a32ce5af880709a761c4cd9e9e43fb67e5058) 2025-04-11 07:34:26 +00:00
a094b3ad3e 2025-04-10 nightly release (cd80778ac8636db99685d792cbc93374f574d38b) 2025-04-10 07:35:22 +00:00
dcea61f3d2 2025-04-09 nightly release (4447352e6499c28c17f4d48d40c2b1cc3d2863a5) 2025-04-09 07:34:53 +00:00
37bbfdc278 2025-04-08 nightly release (836955bdbdeb299e6937065299564fb44ec422c2) 2025-04-08 07:34:33 +00:00
2b568a51f1 2025-04-07 nightly release (d98575806ba3f2b67439c241e980df8f98923f44) 2025-04-07 07:35:43 +00:00
881e8b6bda 2025-04-06 nightly release (15768cc34b9cdafdac645b9c22806e0bf4e74100) 2025-04-06 07:34:21 +00:00
51ba1b337c 2025-04-05 nightly release (7ac81868513a212af6be4a05e2f921cafeeb3069) 2025-04-05 07:34:34 +00:00
56d317d3a7 2025-04-04 nightly release (f9f6c080d8309ac1c5a546a47571389bac0b922c) 2025-04-04 07:34:17 +00:00
e374008e67 2025-04-03 nightly release (532530be34bf396cc632b5c8d95253953e3f7717) 2025-04-03 07:34:28 +00:00
f2ce02c559 2025-04-02 nightly release (5734909f343ab1de44ed5ab23311d43a9c6afaed) 2025-04-02 07:34:31 +00:00
365763495b 2025-04-01 nightly release (827b730f4e1cf172c7ba228a2efd268149163d52) 2025-04-01 07:34:55 +00:00
d5d5207db1 2025-03-31 nightly release (cbc0964636b382e096e4a048909c3fbf4ff82e59) 2025-03-31 07:34:42 +00:00
de61ed1823 2025-03-30 nightly release (52135db69a5b02bb9e5120a5fa410c303f649dfe) 2025-03-30 07:34:05 +00:00
3de3814e5e 2025-03-29 nightly release (d670df356cc27af60b09cb2653dd53a9b92790dc) 2025-03-29 07:33:50 +00:00
dac34906ab 2025-03-28 nightly release (d670df356cc27af60b09cb2653dd53a9b92790dc) 2025-03-28 07:34:40 +00:00
425df71311 2025-03-27 nightly release (114d404b0720e8073748690faeb96449e5c0b229) 2025-03-27 07:34:19 +00:00
41d214b5d4 2025-03-26 nightly release (45b11730f10f64171a9861c98782e1875bad87c9) 2025-03-26 07:34:22 +00:00
3794824ceb 2025-03-25 nightly release (59d5cf083b4f860dea76fe8936076177f9367f10) 2025-03-25 07:35:11 +00:00
139ce10879 2025-03-24 nightly release (de3aca331104776bdfac3f053511b7af442b4038) 2025-03-24 07:34:41 +00:00
9164cb4fbf 2025-03-23 nightly release (621c801f786a0fb24766f8b30b5d3e08b5c25fd3) 2025-03-23 07:34:02 +00:00
233790922b 2025-03-22 nightly release (01b1d1f91b9ee15051489da6d4e9377459ad692c) 2025-03-22 07:34:00 +00:00
6c7ce1b7be 2025-03-21 nightly release (0692301e25f0e0d64b4102819a4c0a4d8fe51b53) 2025-03-21 07:34:04 +00:00
c15acc148e 2025-03-20 nightly release (02e21c78543d0861305382b84c43936c90274eab) 2025-03-20 07:33:48 +00:00
2745ea1c97 2025-03-19 nightly release (790f93db3a5474a26d99e587016815c987566b49) 2025-03-19 07:34:22 +00:00
f66f3718dc 2025-03-18 nightly release (790f93db3a5474a26d99e587016815c987566b49) 2025-03-18 07:34:34 +00:00
3f3f7f67bc 2025-03-17 nightly release (6b1b95ad2acc9404307142d1f050e0e1b5815f99) 2025-03-17 07:34:39 +00:00
1184d9a6aa 2025-03-16 nightly release (f80bee4934dc2d6c8031f481d699cd4832a1a932) 2025-03-16 07:33:51 +00:00
a7848a5ca9 2025-03-15 nightly release (740ce0fa5f8c7e9e51422b614f8187ab93a60b8b) 2025-03-15 07:33:48 +00:00
cc803031f2 2025-03-14 nightly release (e8d36019d428a3c298bedc53d692a81e2f9abd26) 2025-03-14 07:34:00 +00:00
d330b82ed6 2025-03-13 nightly release (f2d43d866cff717c1ce33074d44924be777188c5) 2025-03-13 07:33:50 +00:00
ea02d322f0 2025-03-12 nightly release (2a7e997b3f1805b077810d7fef87cabc4411eeea) 2025-03-12 07:34:14 +00:00
d962699548 2025-03-11 nightly release (295f2ed4d103017f7e19a7b8263ece606cd629db) 2025-03-11 07:33:52 +00:00
cdb42bd8cc 2025-03-10 nightly release (7ae0ce6360b6e4f944906502d20da24c04debee5) 2025-03-10 07:33:51 +00:00
ecc1272a4b 2025-03-09 nightly release (5245304f1ecd4e78bd11f5a5efa8ce12f3b52826) 2025-03-09 07:33:38 +00:00
acd10bbd23 2025-03-08 nightly release (85467ed063d284fa21a2f1d2adfec8fda544923d) 2025-03-08 07:33:48 +00:00
21169daf3a 2025-03-07 nightly release (9c9b05bc4f3661e7b781186e646c08254758a2d4) 2025-03-07 07:33:50 +00:00
e35f2fa1a1 2025-03-06 nightly release (1433bc145526949c84acf4ba5eaa1687cc2d72fe) 2025-03-06 07:34:37 +00:00
8afad6f64c 2025-03-05 nightly release (aade4fbd55a07aaa23dbdfe055d70cd503fd0059) 2025-03-05 07:34:24 +00:00
d13587dfbf 2025-03-04 nightly release (aade4fbd55a07aaa23dbdfe055d70cd503fd0059) 2025-03-04 07:34:06 +00:00
8a38a193e4 2025-03-03 nightly release (ab81ca5053440074dc7d8c46ae4775f62f662394) 2025-03-03 07:33:58 +00:00
96b99c3c6d 2025-03-02 nightly release (ce2f680e0009550ef0dc594f375d542662fcb7e5) 2025-03-02 07:33:55 +00:00
f2a3f7c4b3 2025-03-01 nightly release (08434df1f2f88c9770e59246caa2ff9c6f613270) 2025-03-01 07:33:49 +00:00
5679f4f9c4 2025-02-28 nightly release (1845e7d1f597850b39e2e2d509e7cf8f997462a3) 2025-02-28 07:33:42 +00:00
7c2f94413c 2025-02-27 nightly release (784902983e79fe2f0ae2d7a73a57f4f2338eb86b) 2025-02-27 07:33:48 +00:00
77d7c96844 2025-02-26 nightly release (9ad0ad6497973b3bd72c7b50101cf30d359b950e) 2025-02-26 07:33:57 +00:00
ca19385711 2025-02-25 nightly release (866dc45d3ca0094bb89729b8309eeb25f6c9b575) 2025-02-25 15:21:41 +00:00
2bc7eb9e50 2025-02-24 nightly release (bea72180ed75f522ce4fe5e723bc2112e0874732) 2025-02-24 07:33:42 +00:00
52b3292d38 2025-02-23 nightly release (bea72180ed75f522ce4fe5e723bc2112e0874732) 2025-02-23 07:33:39 +00:00
28e3ea64c0 2025-02-22 nightly release (84fcf1bb11aa2106facb4e44da2a4e295936755d) 2025-02-22 07:33:46 +00:00
9c302da50c 2025-02-21 nightly release (fe100c3c5bcb19899b1f92c3281e527ba1eb011e) 2025-02-21 07:33:48 +00:00
a1f23bcb2e 2025-02-20 nightly release (fea718f062be1ea0602bef36ff42f28df968fd06) 2025-02-20 07:33:51 +00:00
83692a2c20 2025-02-19 nightly release (454fbd5bbe52d241a0ac89dc37574a16d322a1c0) 2025-02-19 13:01:56 +00:00
f1fbb1edba 2025-02-19 nightly release (bd370c138a9378d807ad16228cc6a066f14a526d) 2025-02-19 07:33:50 +00:00
7604dd1102 2025-02-18 nightly release (71484a2106750a5388d0eb1094a2206595d0e7e6) 2025-02-18 07:33:45 +00:00
54ea81783c 2025-02-17 nightly release (ae351d4d0ee0676b81f58170595d016d40cd223f) 2025-02-17 07:33:55 +00:00
68fa1e1ce9 2025-02-16 nightly release (1677a3101959cd2ea5f811a023a2b8b0b9fc6c18) 2025-02-16 07:33:48 +00:00
73f9dbce08 2025-02-15 nightly release (4233a779603207f19033cd433d2961c93b932cb4) 2025-02-15 07:33:51 +00:00
68c826639e 2025-02-14 nightly release (f95bdf5e6c8ea482ba6f64d655513b6a191ac142) 2025-02-14 07:34:01 +00:00
c709115e6c 2025-02-13 nightly release (54e28b2a71dbe003ec03a72c4e829acc0cf25334) 2025-02-13 07:33:50 +00:00
4446d2c933 2025-02-12 nightly release (28a2ab6b8489a22f29bf27b8c691aa27c2ea1094) 2025-02-12 07:33:50 +00:00
90092b9b08 2025-02-11 nightly release (fe94ece375f46d09a223d9796cac02b0796f13f1) 2025-02-11 07:34:51 +00:00
5f7ce38e44 2025-02-10 nightly release (e8304f08fedc802a90f9361c30861f8c5aab946e) 2025-02-10 07:34:04 +00:00
9b43fab6c5 2025-02-09 nightly release (6a9a02acbe34a9d810c8bf56c865b9d0687a3051) 2025-02-09 07:33:44 +00:00
ae9c74989e 2025-02-08 nightly release (9c78fb920d51462bbb6fbdb82592b5963a56ae8a) 2025-02-08 07:33:58 +00:00
1323ac671b 2025-02-07 nightly release (fa0592b568ce35c391dfede8bede9e1ed67f33f5) 2025-02-07 07:33:54 +00:00
a20cc87f83 2025-02-06 nightly release (425804db2b59fe2653fbf0ece730a522865a9b2a) 2025-02-06 07:34:05 +00:00
d945c3e427 2025-02-05 nightly release (3c0d2bc262b51dc55377eee687064d91fb12696d) 2025-02-05 07:33:50 +00:00
48d01e495e 2025-02-04 nightly release (d3c7e4bb9cb570e997dd143817580283c634fab2) 2025-02-04 07:33:46 +00:00
ca2de2e26f 2025-02-03 nightly release (d80eef7c6d2efcbf4b60d555512c7db9da2a6e58) 2025-02-03 07:33:52 +00:00
738ebb45ca 2025-02-02 nightly release (7854299b27d55223e402651ce0d33b209a213589) 2025-02-02 07:33:48 +00:00
6fba2824ee 2025-02-01 nightly release (cde5ddfd1460644bf3f3cbe755b40d44bfc1891b) 2025-02-01 07:34:36 +00:00
edf08cb080 2025-01-31 nightly release (2d6f6637d31854bff0ec7dc6b82429f37e0da271) 2025-01-31 21:28:06 +00:00
5f15cdfe27 2025-01-31 nightly release (27e35de6c288bffad1b4d18b393579c1d1a95547) 2025-01-31 07:33:54 +00:00
b27bb24208 2025-01-30 nightly release (354fe48db9ef94c69db6d03d997a374048824f83) 2025-01-30 07:33:57 +00:00
e1e9393cdf 2025-01-29 nightly release (9fd6722fc9068eeaa176754acb315fc7e0f6416c) 2025-01-29 07:34:09 +00:00
e72d5a5817 2025-01-28 nightly release (c1161957a4122d1913aa00ce2c17c1cc7b83b309) 2025-01-28 07:33:41 +00:00
3127214173 2025-01-27 nightly release (b75afa2e2e492ea59502daa73196fb7454a75a1a) 2025-01-27 07:33:54 +00:00
f97e773692 2025-01-26 nightly release (90448f0128d8090a07325be24bd513c4d14bed6d) 2025-01-26 07:33:55 +00:00
66d78ef560 2025-01-25 nightly release (ef60de07a0d390ac06efaa41b6548118cf84e0ea) 2025-01-25 07:33:47 +00:00
16f149b177 2025-01-24 nightly release (6f07847efe94157b9ab0bdba080f952ecebfbdc3) 2025-01-24 07:34:06 +00:00
e96db3e274 2025-01-23 nightly release (5a18f1e1eb5ae9cf87f66a367f07fdb5211531c7) 2025-01-23 07:33:44 +00:00
76535beee0 2025-01-22 nightly release (f2cfe8b59f56b75f4aa313b3979af845a903f5f8) 2025-01-22 07:34:05 +00:00
1d5261b3a7 2025-01-21 nightly release (5fd881a5b67bf715f212dad8c7cacd4ddfb5746b) 2025-01-21 07:33:56 +00:00
37626ee0e6 2025-01-20 nightly release (6cb186e279bc179a6bb63f0226e24ab42a07b394) 2025-01-20 07:33:45 +00:00
9cf75bc595 2025-01-19 nightly release (8cc415774f47b5a50077f72ea493b71b8101e48d) 2025-01-19 07:33:43 +00:00
afeff746d3 2025-01-18 nightly release (64e54d5af638f6e427e8ad2355c45970400c437e) 2025-01-18 07:33:47 +00:00
ab2a8287ca 2025-01-17 nightly release (43a00d73b36494e052a82418182c63e18e9d9f69) 2025-01-17 07:33:50 +00:00
b3e16e3b7b 2025-01-16 nightly release (62ce3e6e84df516fdd5310d5095fa01251806f1d) 2025-01-16 07:33:47 +00:00
55bf78123d 2025-01-15 nightly release (e2251fffbb012303f1c47ead67fa733a49fd21b4) 2025-01-15 07:34:00 +00:00
68e1d954e3 2025-01-14 nightly release (dfe06e555d474d3ed597b67893e6ff270e2070f4) 2025-01-14 07:33:53 +00:00
32c95e44c6 2025-01-13 nightly release (87843ee9ab50778a98eda62fd7498d44c69488bd) 2025-01-13 07:33:42 +00:00
24d762795f 2025-01-12 nightly release (e0f67405a154e7f9ce1ca9533cbc1d156fe075d7) 2025-01-12 07:33:47 +00:00
ea2107b759 2025-01-11 nightly release (68dad26b9504d96bf7c247941dee67ffb1a781e9) 2025-01-11 07:33:53 +00:00
07c3c5e34c 2025-01-10 nightly release (9f09b719d33c61224ebb85baa369a8364063aa6f) 2025-01-10 07:33:48 +00:00
bd1e0e0018 2025-01-09 nightly release (dcc3cf7066b4d8cab63ecb73daf1e36b01220a4e) 2025-01-09 07:33:53 +00:00
a00264a930 2025-01-08 nightly release (28b4992e7a60bb3fbb07c591099fa810557b4e57) 2025-01-08 07:33:56 +00:00
de51f09432 2025-01-07 nightly release (301b9c8a90002fa621d93b108e54460066226629) 2025-01-07 07:33:58 +00:00
65e9e383bb 2025-01-06 nightly release (cb5fa17e4437d0cf20e2cde03bc1c753d49b1f69) 2025-01-06 07:33:47 +00:00
e67f93ae33 2025-01-05 nightly release (f2d6cfa6775601df5a038f7a4d0b37da75a53ed9) 2025-01-05 07:33:47 +00:00
0f7331ca63 2025-01-04 nightly release (0a94bb432ed75cc2d950d81b2921363218a7e459) 2025-01-04 07:34:05 +00:00
8c92c3c359 2025-01-03 nightly release (496fc909651ff4fc0d1933277c2d30524061471c) 2025-01-03 07:33:57 +00:00
f769c99631 2025-01-02 nightly release (a174ee2255aa181d8d39c167694db1eb1a2e0deb) 2025-01-02 07:33:53 +00:00
d4e525871c 2025-01-01 nightly release (a174ee2255aa181d8d39c167694db1eb1a2e0deb) 2025-01-01 07:33:45 +00:00
a1bc249b5f 2024-12-31 nightly release (8df99b6a6e1fb7b1e548ef01e8d21a652019ed28) 2024-12-31 07:33:43 +00:00
40b0741e91 2024-12-30 nightly release (2ed4d65af0a1993c0df7b081f4088d0f3614283e) 2024-12-30 07:34:20 +00:00
ad23baaa28 2024-12-29 nightly release (cf0b72c4ab960a847758132cc501cf793926e070) 2024-12-29 07:33:41 +00:00
b7f762e00d 2024-12-28 nightly release (d3e9133ab217b8b1f244cd78428d116f83d5806b) 2024-12-28 07:33:58 +00:00
b01ca2c28b 2024-12-27 nightly release (1cd70e7e237d03de3f50445ab0c7975d6749dc5e) 2024-12-27 07:33:50 +00:00
dd88b7cb14 2024-12-26 nightly release (4bacfd6e117f1e77dbce9444fc3ef3d95c5f2b89) 2024-12-26 07:33:54 +00:00
4ae33911b2 2024-12-25 nightly release (c0d710634fcce172490c3ace0de977829b38bc06) 2024-12-25 07:33:52 +00:00
993ac7f9d6 2024-12-24 nightly release (6ccb8ed1868984d9d2ea4e48a085508d1027cd9b) 2024-12-24 07:33:51 +00:00
b2d1835353 2024-12-23 nightly release (12662901aa8ced7d1118193e40dd4f018bb2e0fa) 2024-12-23 19:46:19 +00:00
b74622335a 2024-12-23 nightly release (eebc93d41eeffb936cbf20c9052e1e813d0cc052) 2024-12-23 07:33:45 +00:00
9f02401460 2024-12-22 nightly release (f1cbf4b1b5a299f999c11e77bfabe39c7f04efdc) 2024-12-22 07:33:41 +00:00
3333f84c71 2024-12-21 nightly release (b5e159270a3784bc60fcb18513f30af22521dcf0) 2024-12-21 07:33:48 +00:00
6b3b6ccebb 2024-12-20 nightly release (75661f2036d36fd7f869cd749eb6ef5fb40e4772) 2024-12-20 07:33:45 +00:00
61944adb69 2024-12-19 nightly release (2c48af568a082c70ca4ca2cdc9b6469ed253a371) 2024-12-19 07:34:07 +00:00
a5fc054c22 2024-12-18 nightly release (2ea4b56ec872424e486c4fe2d55da061067a2ed3) 2024-12-18 12:53:39 +00:00
7260a15470 2024-12-18 nightly release (c06b5048ba866e2dd39e5da5399fe8261322c7ca) 2024-12-18 07:33:51 +00:00
5764ca46ed 2024-12-17 nightly release (c15638d8035f706b29a402397fd6766025801032) 2024-12-17 07:34:49 +00:00
3f159d6357 2024-12-16 nightly release (af8789c05654477649e4d99e6a253a2ebd81ad9e) 2024-12-16 07:35:17 +00:00
9f9823e3d2 2024-12-15 nightly release (dd2d360b7d5dcc66660fdfe8da083a7077dada56) 2024-12-15 07:33:42 +00:00
587bb30c44 2024-12-14 nightly release (698eefadddb7c276b381ecb510f4bcc219212c25) 2024-12-14 07:34:11 +00:00
33229b026a 2024-12-13 nightly release (ceb664aca6861d1a05c3d5602b1d7086a1e2901a) 2024-12-13 07:34:15 +00:00
3c7f1c0b09 2024-12-12 nightly release (1dd6f2102936175c06468aa4a18fcf401b53b439) 2024-12-12 07:35:13 +00:00
3bcdb4ed6b 2024-12-11 nightly release (95e7fcf82e91fa965bf35790d06fbf69bb945f81) 2024-12-11 07:34:59 +00:00
96a8273b63 2024-12-10 nightly release (5d3bc633ff3fab550afd286d53697bb5aab1c60e) 2024-12-10 07:35:50 +00:00
66b9fcf5f6 2024-12-09 nightly release (7435f57f600ff083bd2d0efcf33eeda674d2caa1) 2024-12-09 07:35:41 +00:00
afb4be9252 2024-12-08 nightly release (2682e5e0d48a8200c1672b6a42250d3c8de44190) 2024-12-08 07:33:59 +00:00
21b0f3b9bd 2024-12-07 nightly release (7939b5f5f9b073984c26adef1446fa250a20bceb) 2024-12-07 07:34:19 +00:00
3dd667797a 2024-12-06 nightly release (80ca6dd892613fd4f1dee9040b8273ddeadb1c50) 2024-12-06 07:34:55 +00:00
e21ebc30af 2024-12-05 nightly release (b08bc07cd75d166ea4873a5ea24680bfd6204a76) 2024-12-05 07:35:07 +00:00
aa019efbc3 2024-12-04 nightly release (30d907c6fbb0e1ce81023e10f9f2b6bbc27bf59a) 2024-12-04 07:35:07 +00:00
7851460668 2024-12-03 nightly release (77748ed8ec2dea0deb6ffd59bd28329e6cdc9d56) 2024-12-03 07:35:18 +00:00
90b7dcf2c5 2024-12-02 nightly release (5deca07c0dcf1482eba99bf93b805cf1cc41ad6c) 2024-12-02 07:35:32 +00:00
798d5b7ddd 2024-12-01 nightly release (e29dabbd719b988208f329c22157ecf03be6cf56) 2024-12-01 07:34:27 +00:00
8712710221 2024-11-30 nightly release (e29dabbd719b988208f329c22157ecf03be6cf56) 2024-11-30 07:34:13 +00:00
f0427e7be1 2024-11-29 nightly release (b97a7861256188efb7d4cf862886db7e547040a9) 2024-11-29 07:34:43 +00:00
1fba97d21a 2024-11-28 nightly release (fea771dcce8508c5d71755da65d1b97d6da13475) 2024-11-28 07:35:07 +00:00
44c5ea24d5 2024-11-27 nightly release (9b89fa44bad4128b0be2b03b0d8c11ee800d3de6) 2024-11-27 07:35:04 +00:00
06d9768062 2024-11-26 nightly release (851edf208b308789087f263de739607ea25c8a0e) 2024-11-26 07:35:07 +00:00
b16d9ba310 2024-11-25 nightly release (2398e758d2493c8142d4b7f9c72207f05991e9e9) 2024-11-25 07:34:49 +00:00
b0a6940290 2024-11-24 nightly release (c513f01516673898d551818c8ca6085cf07e4006) 2024-11-24 07:34:33 +00:00
716ed475a1 2024-11-23 nightly release (44d5012a80e9b9c0f28dfc9927bfe8157412254d) 2024-11-23 07:33:50 +00:00
b8aad2e5e5 2024-11-22 nightly release (740d1eb0306f1f9d0ce81ea81f287a6b52738fab) 2024-11-22 07:34:41 +00:00
ac47a2d971 2024-11-21 nightly release (e0482fdf95eb3ce679fa442b50871d113ceb673b) 2024-11-21 07:34:45 +00:00
dd2e6d6140 2024-11-20 nightly release (cee3f8541e9fed93095e249d17f791b861548562) 2024-11-20 07:34:51 +00:00
a7f5ff1bc5 2024-11-19 nightly release (98e441f00b2e4dff5234df120e6194eab6773853) 2024-11-19 07:35:01 +00:00
37b96b77a5 2024-11-18 nightly release (c3fbec74bd92e1c4e97a6f2908cabb799d515ce7) 2024-11-18 07:35:13 +00:00
edeae2313f 2024-11-17 nightly release (2fc692b3dd42bf92c4f92dcec862bae7ae1c7995) 2024-11-17 07:34:29 +00:00
5d487aa9be 2024-11-16 nightly release (924c1fe3f304aa599b823fb549c35b7809f61086) 2024-11-16 07:34:02 +00:00
79da951f19 2024-11-15 nightly release (1c1d06a22c2534cca3c4c36f5e7947a67b0d1512) 2024-11-15 07:34:34 +00:00
a0d79cba8d 2024-11-14 nightly release (3ce75e7ea6706adf64d823d0c7ab2d04f657d43e) 2024-11-14 07:34:09 +00:00
1ba72852e9 2024-11-14 nightly release (70060b0927221c63cf08be896dbca6103f2be96e) 2024-11-14 03:11:36 +00:00
d195b9388e 2024-11-13 nightly release (39d1c91c33270f66cdf57bae2a19e7738cf8f77f) 2024-11-13 07:34:06 +00:00
555bc8349d 2024-11-12 nightly release (19eff28ff3f19b50da46f5a9ff5f4d4d213806fe) 2024-11-12 07:34:12 +00:00
9c847fb635 2024-11-11 nightly release (ffb979032dc149b4c895526fe5b92d713ed7b1e1) 2024-11-11 07:34:40 +00:00
77d07d22ef 2024-11-10 nightly release (5ef33e40b3c3fd2608552d3301c7255826c0e7f6) 2024-11-10 07:33:34 +00:00
30cf05fc13 2024-11-09 nightly release (cc44b55b0080759d85068aefa9c74e3bf49658e1) 2024-11-09 07:33:37 +00:00
29017895e1 2024-11-08 nightly release (4715b7700131618be5f439a8ad6b5ae94a201915) 2024-11-08 07:33:41 +00:00
0d5247caf3 2024-11-07 nightly release (cf0bb6c435c58db4c72e489f462e1a0ebe310f14) 2024-11-07 07:33:48 +00:00
5a05d8baca 2024-11-06 nightly release (63b01f328ef63a28b6c31e711d094b1a4b2b7d28) 2024-11-06 07:33:56 +00:00
a8b26c6b07 2024-11-05 nightly release (9039fbb47ecfc93df74a014a209e5929d10fd2a3) 2024-11-05 07:33:56 +00:00
76c297ad6b 2024-11-04 nightly release (e6ff07f00e04a9b58efb86a3dd70ed7280ae8522) 2024-11-04 07:34:15 +00:00
3c901e20a9 2024-11-03 nightly release (f121eab0182f7da58b39ffb84744bdc7109817e3) 2024-11-03 07:34:09 +00:00
a37bd4b1a2 2024-11-02 nightly release (8d1eaa3da663c2bdd626ff1aafa1c5d0ff6e1bf3) 2024-11-02 07:33:48 +00:00
3e0dd1351c 2024-11-01 nightly release (c8a648d4dffb9f0133ff4a2ea0e660b42105d3ad) 2024-11-01 07:34:23 +00:00
81d824d17a 2024-10-31 nightly release (70ba471957011c0ef9d35207b4fb63657e5f8d1a) 2024-10-31 07:34:05 +00:00
e47e879449 2024-10-30 nightly release (23d590e518688f96e1d1947a08e9ca27df3e67e4) 2024-10-30 08:48:13 +00:00
c787213d41 2024-10-29 nightly release (4ee514144bcc227c6d73192d49baf354eb5f321c) 2024-10-29 07:34:47 +00:00
7784111661 2024-10-28 nightly release (d2ec28978730db514931e4dc146f8188738ed1ff) 2024-10-28 07:34:27 +00:00
cef671f99b 2024-10-27 nightly release (00504aa6b8b0ae68761b89f023184202e8c79bc8) 2024-10-27 07:34:07 +00:00
36c366341b 2024-10-26 nightly release (a3de067975a61f11ccb8580416ea4d4a838cdb24) 2024-10-26 07:33:44 +00:00
bd5482c7c3 2024-10-25 nightly release (32a3dbc6450171dec4ef62a36037dd5dc24790d2) 2024-10-25 07:34:20 +00:00
f64544f7fa 2024-10-24 nightly release (dbd6ada8c3272e9aa36b5da0ee785da5e17bd650) 2024-10-24 07:34:14 +00:00
4e9fedafc2 2024-10-23 nightly release (f2ebf6d94ab8d9f6b1a1ba8c6d6d120d9e8a2156) 2024-10-23 07:35:03 +00:00
65a14490e8 2024-10-22 nightly release (89067402d40e70f70b66f734c830fcd450c665df) 2024-10-22 07:36:10 +00:00
5553778a00 2024-10-21 nightly release (82eb09aafd7e4ee6e4fb0580f2221ea6253d218b) 2024-10-21 07:34:43 +00:00
160d421a40 2024-10-20 nightly release (47e80abc7a9de6b5cdc20f7d1a8afb68c639d764) 2024-10-20 07:34:10 +00:00
64b61eb313 2024-10-19 nightly release (e8b1409dcfbc7609110c9c332d6038a957b8dd9f) 2024-10-19 07:33:53 +00:00
c2c91fc8a4 2024-10-18 nightly release (7365a57dc05cc500b5ec910ae008bb168dbadd08) 2024-10-18 07:34:10 +00:00
e4dcf5b50e 2024-10-17 nightly release (fe43f72be7e0afd7fc6e5eee5dfdfb2968e66f32) 2024-10-17 07:34:19 +00:00
1655e904b5 2024-10-16 nightly release (75109682b66267d35ed2a5235ef50348926a336c) 2024-10-16 07:34:18 +00:00
ec8499a174 2024-10-15 nightly release (ed94725b8c5d70b31659d10775c011a23cbcb464) 2024-10-15 07:35:42 +00:00
2fcfb44b7e 2024-10-14 nightly release (41977a05314bbf537e1c5d6cf5916a368d1907d9) 2024-10-14 07:35:48 +00:00
380701ee2e 2024-10-13 nightly release (e89fe0bd6e827435ce436af5df93edb4494f6def) 2024-10-13 07:33:49 +00:00
de761c548d 2024-10-12 nightly release (c254901bdb6dcf8be91b41016c97cf685145fc1a) 2024-10-12 07:33:49 +00:00
e767966307 2024-10-11 nightly release (8321eec009c8c79145ebccd51fdfc336e5f8b848) 2024-10-11 07:34:11 +00:00
2ca8a3bdd1 2024-10-10 nightly release (d9f4a7d3f9678e9492ee1485987370973a55bef9) 2024-10-10 07:33:54 +00:00
7a558ac7ae 2024-10-09 nightly release (23c531b3e903764d06601e2f94f05b61038db83c) 2024-10-09 07:33:58 +00:00
1217ddad52 2024-10-08 nightly release (ff95ff5d38d5133ce74543f8a7a769b6dd9e5d72) 2024-10-08 07:34:05 +00:00
d7f1e2141a 2024-10-07 nightly release (d1b87e26e5c4343f5b56bb1e6f89b479b389bfac) 2024-10-07 07:33:51 +00:00
c9a3e50fbf 2024-10-06 nightly release (4061910ba26f3d81bcde29251677bda1dffbe734) 2024-10-06 07:33:44 +00:00
c327365f95 2024-10-05 nightly release (d6f340f66c2a1b0bdf22377d10b073dd0e5a95d6) 2024-10-05 07:33:47 +00:00
e8a3a16596 2024-10-04 nightly release (88e338f4dddd8fa3eac9c63a4b742dfe8aad0302) 2024-10-04 07:33:59 +00:00
76cfea5e2c 2024-10-03 nightly release (5c0ce8d0a6423472959cf17efa92b20160c6f6a5) 2024-10-03 07:33:57 +00:00
4b2970f7cd 2024-10-02 nightly release (18525e185e211b3eab44c67a688e5df8396f6f97) 2024-10-02 07:33:44 +00:00
972a8333fc 2024-10-01 nightly release (156ca01e51f766b1b069c5c6f3d57112a5c8f9ff) 2024-10-01 07:33:57 +00:00
c8887ca3bc 2024-09-30 nightly release (156ca01e51f766b1b069c5c6f3d57112a5c8f9ff) 2024-09-30 07:33:49 +00:00
f8a6ada8af 2024-09-29 nightly release (1d6e0412f5205b1cd709e034526d7f21d6f2d56f) 2024-09-29 07:33:48 +00:00
32ef0376fd 2024-09-28 nightly release (f42e88fea5915012ddb4cd9530f367524bf57019) 2024-09-28 07:33:49 +00:00
5ba404f687 2024-09-27 nightly release (aa56f80ec1d690f40a38893bfe72b88dd34aa7f7) 2024-09-27 07:33:57 +00:00
29f97eb575 2024-09-26 nightly release (76b044d7cb987081aaaf5b381b74c93543023f7f) 2024-09-26 07:33:58 +00:00
21a304b17f 2024-09-25 nightly release (780f4debdbb51347da5b416416392e5849cbfec5) 2024-09-25 07:33:45 +00:00
5fe833157e 2024-09-24 nightly release (e09c5b6046bbe86958f066b09adeed8435d7d518) 2024-09-24 07:33:51 +00:00
64141411e0 2024-09-23 nightly release (f276da7f988b12fad74594aa054197371be17184) 2024-09-23 07:33:48 +00:00
4b5ae4de44 2024-09-22 nightly release (9731ccb9e0c9f09dcd3435a47fc40fe6fb960504) 2024-09-22 07:33:48 +00:00
4d07616615 2024-09-21 nightly release (1a10751731784942dcbb9c0524c1369a29d45244) 2024-09-21 07:33:57 +00:00
4c73f163cd 2024-09-20 nightly release (fe0e9fb385d6e75a01c9feaa459252db09b71839) 2024-09-20 07:33:48 +00:00
89957684e0 2024-09-19 nightly release (e037bb326fdafca243bdb08023bbef93b29a4513) 2024-09-19 07:33:51 +00:00
900998e06b 2024-09-18 nightly release (48d18fbd4cf785e1f69a6555d97a39023a5d199e) 2024-09-18 07:34:22 +00:00
aec9b2ab77 2024-09-17 nightly release (37a08b33bb86ad0fce3db6fc98f90c9e02d77cba) 2024-09-17 07:33:53 +00:00
79d8db5004 2024-09-16 nightly release (c64ae601ba9eb3ad2cd3402a14f6ac83c0ab7eba) 2024-09-16 07:34:04 +00:00
458f88a3af 2024-09-15 nightly release (c64ae601ba9eb3ad2cd3402a14f6ac83c0ab7eba) 2024-09-15 07:33:41 +00:00
a36aa26d30 2024-09-14 nightly release (081c4a966de77dd5187c1cb97d287cae6f3fbc02) 2024-09-14 07:33:47 +00:00
cc185792f2 2024-09-13 nightly release (0cdc6a8dcd7e294b01d8914385bbe45e79c1770d) 2024-09-13 07:34:19 +00:00
f23873cf88 2024-09-12 nightly release (a130ed828ab879aa4a8c3fcaba5148e872ccaf42) 2024-09-12 07:33:46 +00:00
c42ec0f9b5 2024-09-11 nightly release (26e5572dd24acd69b08eb94577f8796a480c9fe0) 2024-09-11 07:33:46 +00:00
584f674aa0 2024-09-10 nightly release (7ec17b49cf89cfeb97272a1baddcc30fa6fa66d8) 2024-09-10 07:33:42 +00:00
995ec16c7a 2024-09-09 nightly release (31c4e0d37d8efc37a0697159e5b9121ec34d5141) 2024-09-09 07:33:44 +00:00
5678da6c98 2024-09-08 nightly release (3bebc09be9845c0779f190489e8d4caa9e2653c8) 2024-09-08 07:33:41 +00:00
9515104a2b 2024-09-07 nightly release (3d734d837be7b065dc36ae808699459c8e73dda3) 2024-09-07 07:33:42 +00:00
e4f495cdb0 2024-09-06 nightly release (06a7dc21c1005750598c37f3adbc031183c74de6) 2024-09-06 07:33:52 +00:00
90cf7dcfbb 2024-09-05 nightly release (a7a53b796b1fcde9a52c86d29d88e12bbf2b4092) 2024-09-05 07:33:46 +00:00
48d4dac077 2024-09-04 nightly release (e000cf0ad980e5d140dc895a646174e9b945cf26) 2024-09-04 07:34:03 +00:00
704c6fc1cb 2024-09-03 nightly release (c140fa1426603322a5a69ef91300f13489db5970) 2024-09-03 07:33:42 +00:00
e8379aab48 2024-09-02 nightly release (c140fa1426603322a5a69ef91300f13489db5970) 2024-09-02 07:33:54 +00:00
00e3eea170 2024-09-01 nightly release (caa04e0caeb5736e9094f6e0667c03585cc90724) 2024-09-01 07:33:53 +00:00
bed3938804 2024-08-31 nightly release (86e03a64e1e9570adfb574bc406cc5f3d8d90209) 2024-08-31 07:33:44 +00:00
603ac39af5 2024-08-30 nightly release (0d5f9787959efa671d390d5ae02a445a3314178c) 2024-08-30 07:33:47 +00:00
e4cd76cf82 2024-08-29 nightly release (41e36e2b46e05b8718ca724f1780040783c621d2) 2024-08-29 07:33:45 +00:00
de66933aaf 2024-08-28 nightly release (55236d0cb7d183f2b64982d8718e9012ae8a40d8) 2024-08-28 07:33:44 +00:00
7e19f8fbcd 2024-08-27 nightly release (55236d0cb7d183f2b64982d8718e9012ae8a40d8) 2024-08-27 07:33:44 +00:00
3bca514ed8 2024-08-26 nightly release (2553278bae5993bd94bae4f04bf4586fb3f30d57) 2024-08-26 07:33:54 +00:00
aa1fc68d51 2024-08-25 nightly release (2553278bae5993bd94bae4f04bf4586fb3f30d57) 2024-08-25 07:34:01 +00:00
d1c7d770de 2024-08-24 nightly release (2553278bae5993bd94bae4f04bf4586fb3f30d57) 2024-08-24 07:33:42 +00:00
5ddb842eb9 2024-08-23 nightly release (255cd75a974389985a7a542be1455210a9c369e2) 2024-08-23 07:34:34 +00:00
ef01a09a2c 2024-08-22 nightly release (255cd75a974389985a7a542be1455210a9c369e2) 2024-08-22 07:33:52 +00:00
401217e3fd 2024-08-21 nightly release (c42ac54d9e817bf0a0366eb78e6c8beba4d5eff5) 2024-08-21 07:33:51 +00:00
02f175d628 2024-08-20 nightly release (92151c814ba715fe7d1f5648b0ae6950dceee6b7) 2024-08-20 07:33:41 +00:00
d6eae72783 2024-08-19 nightly release (ae000635700e78161e0ed1a18f62b5db4030e343) 2024-08-19 07:33:48 +00:00
748db193d7 2024-08-18 nightly release (b4a1673a6741e183856cf3503f0574d3ac881ce0) 2024-08-18 07:33:38 +00:00
77b78fab0a 2024-08-17 nightly release (12b8e29203dfb8f9c3b1a4c438aac4981a0899ae) 2024-08-17 07:33:42 +00:00
c89b05606e 2024-08-16 nightly release (29c4b4ea5a23ec41b98def0f50a0294c0bdc5c18) 2024-08-16 07:33:42 +00:00
0f8aad57ae 2024-08-15 nightly release (3965f118376a01382b0d1d3eea612188c4847ec8) 2024-08-15 07:33:46 +00:00
3a023a67c4 2024-08-14 nightly release (c2eeda5da0edfbb59673a229b085a80945514fc6) 2024-08-14 07:33:39 +00:00
839ac8af16 2024-08-13 nightly release (d4b31f7bcf962560768b1ee24a2ee5a1e914b8ba) 2024-08-13 07:33:53 +00:00
954b7d3faa 2024-08-12 nightly release (e7b870c88bc3b854a95399a96a274d2f1f908172) 2024-08-12 07:33:41 +00:00
22f2d2c094 2024-08-11 nightly release (a7912bf9dc39b934baf5e04b436cc2134776c10d) 2024-08-11 07:33:35 +00:00
db707c9b5a 2024-08-10 nightly release (05de2b2d0fd582e102f469e55a8a7212aef94a70) 2024-08-10 07:33:34 +00:00
94cf247e0e 2024-08-09 nightly release (bb6eef8ed1de0eb48bde10a07da57b6acc82fb05) 2024-08-09 07:33:42 +00:00
7d5462bfd6 2024-08-08 nightly release (ac6398b630db42570228b95ffbdeab12b7684357) 2024-08-08 07:33:49 +00:00
6d2050e0db 2024-08-07 nightly release (919e38424729fd9499f99a7d9e0ed994669954ef) 2024-08-07 07:33:45 +00:00
ed158d3e1a 2024-08-06 nightly release (345bea01dc69d9c31a4954c299767ccecd702d3d) 2024-08-06 07:33:43 +00:00
b9251a4598 2024-08-05 nightly release (c7cfa5172139737bf75afbd4a7920b1a02b1dcb2) 2024-08-05 07:33:44 +00:00
d6ea1eb2bc 2024-08-04 nightly release (6ec4af6865dd884f984c9dbcb273ae26e3825481) 2024-08-04 07:33:35 +00:00
0cf4254a75 2024-08-03 nightly release (419b76c4ac80c8b1c95120cd52db622333a3a688) 2024-08-03 07:33:43 +00:00
3974b13308 2024-08-02 nightly release (93979e70631ae90afe26c25ef620b311c9b6a8f5) 2024-08-02 07:33:37 +00:00
2beeb996cb 2024-08-01 nightly release (f0da167ce55ddbbe4f0138ed0f0804008069ddad) 2024-08-01 07:33:47 +00:00
e84a29505c 2024-07-31 nightly release (afb04d78c822bbad35b9c72cb21408525dc186f9) 2024-07-31 07:33:42 +00:00
ea5d26ff81 2024-07-30 nightly release (05a8540041cea936a63355c2e38b7b3beb5ce168) 2024-07-30 07:33:43 +00:00
0e29c08c0d 2024-07-29 nightly release (500aea8d5033fd3540c6ed325dd80e7e1420b0f3) 2024-07-29 07:33:36 +00:00
246f9c93b2 2024-07-28 nightly release (8f5cf464054a191c23219e49cc6e3203b2396cfb) 2024-07-28 07:33:36 +00:00
092902bbd4 2024-07-27 nightly release (28fd2e905da8484de38faf4e04073e896f7a95f7) 2024-07-27 07:33:39 +00:00
4e12f36799 2024-07-26 nightly release (58b8704f28f569fc08fadacc12830346251dd297) 2024-07-26 07:33:57 +00:00
46fcfc993e 2024-07-25 nightly release (14495ce28876e6c19733d32e07f913dba6b55771) 2024-07-25 07:33:39 +00:00
d3def8fd79 2024-07-24 nightly release (3aa45cae7750070c25e99fc49409ab311aa7db0b) 2024-07-24 07:33:43 +00:00
d9e6b14e89 2024-07-23 nightly release (406f510f89dd7561eec8da9321d8572bfad64e72) 2024-07-23 07:33:48 +00:00
13bc64a43d 2024-07-22 nightly release (c2425a3b572328c6c1fdadc080f8a83c6357f945) 2024-07-22 07:34:28 +00:00
571ca813dc 2024-07-21 nightly release (1d1d074072ecb0aa6ca95e3f43221d2275e16d74) 2024-07-21 07:33:39 +00:00
2c2b18cf11 2024-07-20 nightly release (7c299b46ca4d056835f7da0d6950939106469edb) 2024-07-20 07:34:08 +00:00
c0f37f35d9 2024-07-19 nightly release (367213a608528ee74e67e03bf11f775e263ef480) 2024-07-19 07:33:36 +00:00
5147aeb49a 2024-07-18 nightly release (874bbc53c9dfdf108e8a8af63109c06f9ff7f6e4) 2024-07-18 07:33:37 +00:00
bd12fbe652 2024-07-17 nightly release (7919f0b952272b83886021b172a8d56a893917ba) 2024-07-17 07:33:41 +00:00
4e39cdceb1 2024-07-16 nightly release (9ab8d47f9d27f6f4102e7786fcd604861d8e480c) 2024-07-16 07:33:36 +00:00
1c036b3ea4 2024-07-15 nightly release (6f275ae4d06b82a2b41a04516d793ad15d45955a) 2024-07-15 07:33:45 +00:00
c02ee0cb6a 2024-07-14 nightly release (f9f85bfc0b5b63274fa3fdd22afb0a456abf53f4) 2024-07-14 07:34:08 +00:00
e4e5c41b7e 2024-07-13 nightly release (d727e2f2d12c825879acbea10abb7b08bc8b0691) 2024-07-13 07:33:32 +00:00
f8279e35f3 2024-07-12 nightly release (18b7633bfbf859904222aad90006b54ae562766c) 2024-07-12 07:33:35 +00:00
a9a7d1e7ee 2024-07-11 nightly release (ceedee23ec2cec14d12c2edd106d9c239dae95a7) 2024-07-11 07:33:41 +00:00
0bf00552b8 2024-07-10 nightly release (007e75958fd2e822b5266df500cf791c14b92b62) 2024-07-10 07:33:45 +00:00
fc864dcf35 2024-07-09 nightly release (1e61cb8c871c876b6f8166d173dabd1440bda4f0) 2024-07-09 07:33:45 +00:00
32e74ed040 2024-07-08 nightly release (7128504424ca54311efdf22f2c8425291586860e) 2024-07-08 07:33:56 +00:00
1a71eb647b 2024-07-07 nightly release (7128504424ca54311efdf22f2c8425291586860e) 2024-07-07 07:33:39 +00:00
cb92c66e9c 2024-07-06 nightly release (7128504424ca54311efdf22f2c8425291586860e) 2024-07-06 07:34:09 +00:00
419de13620 2024-07-05 nightly release (7128504424ca54311efdf22f2c8425291586860e) 2024-07-05 07:33:46 +00:00
a532737602 2024-07-04 nightly release (9108b74bbc62eeb48e668ea67525445d01568c65) 2024-07-04 07:33:38 +00:00
6d35c780e0 2024-07-03 nightly release (3b77b122c5ce3033df1e1d541de2a3d420225c9d) 2024-07-03 07:34:19 +00:00
d7ee373e90 2024-07-02 nightly release (9ae78a578caff195821ad535a9e8d8ef59552142) 2024-07-02 07:33:46 +00:00
7b2cee6dc9 2024-07-01 nightly release (9ae78a578caff195821ad535a9e8d8ef59552142) 2024-07-01 07:34:15 +00:00
f28393cfc9 2024-06-30 nightly release (9ae78a578caff195821ad535a9e8d8ef59552142) 2024-06-30 07:33:39 +00:00
e0d8e538f0 2024-06-29 nightly release (a32ce5ce344b384ad4c6642895835c10ae7872c8) 2024-06-29 07:33:48 +00:00
364e6692f3 2024-06-28 nightly release (424068d0d22908294f2e0705d7227c37244b9319) 2024-06-28 16:02:35 +00:00
2f21c45513 2024-06-28 nightly release (d4b6ff6fbe31385ca9e5e299ff616203f5b4717d) 2024-06-28 07:33:37 +00:00
8b2405fd53 2024-06-27 nightly release (cdbd6542d015bae9b850602834c4f22e3001084b) 2024-06-27 07:33:34 +00:00
bde150a53f 2024-06-26 nightly release (90d5a6f001ef3ea40ef91ae20e050e39a6d550de) 2024-06-26 16:45:19 +00:00
4f096464a9 2024-06-26 nightly release (00f675bb4c2ec02bb5ffecfc75571026e220701c) 2024-06-26 07:33:42 +00:00
9e60d009a5 2024-06-25 nightly release (00f675bb4c2ec02bb5ffecfc75571026e220701c) 2024-06-25 18:29:51 +00:00
a215657b4a 2024-06-25 nightly release (00f675bb4c2ec02bb5ffecfc75571026e220701c) 2024-06-25 17:15:56 +00:00
2a0a69813f 2024-06-25 nightly release (00f675bb4c2ec02bb5ffecfc75571026e220701c) 2024-06-25 15:58:29 +00:00
1125d755bf 2024-06-25 nightly release (00f675bb4c2ec02bb5ffecfc75571026e220701c) 2024-06-25 07:33:37 +00:00
bbd0babd09 2024-06-25 nightly release (00f675bb4c2ec02bb5ffecfc75571026e220701c) 2024-06-25 04:25:54 +00:00
475c553dc8 2024-06-25 nightly release (00f675bb4c2ec02bb5ffecfc75571026e220701c) 2024-06-25 03:43:16 +00:00
f9a5736c12 2024-06-24 nightly release (17d1723aeeadbfc6d33c02ab56c5aacb8c671876) 2024-06-24 21:35:25 +00:00
9bb4dd8ceb 2024-06-24 nightly release (920ebccca2644881ece4f9e07b4a4b4787b8f2b1) 2024-06-24 07:33:38 +00:00
91c197583a 2024-06-23 nightly release (920ebccca2644881ece4f9e07b4a4b4787b8f2b1) 2024-06-23 07:33:47 +00:00
74ca07380a 2024-06-23 nightly release (920ebccca2644881ece4f9e07b4a4b4787b8f2b1) 2024-06-23 01:49:12 +00:00
cb859f172e 2024-06-22 nightly release (92ca17d85def4a62aee04fcea3576cd0c07a0554) 2024-06-22 07:33:49 +00:00
769190cb3b 2024-06-21 nightly release (63a724d8e1fd49e64e31879c8545b285d213efe3) 2024-06-21 07:34:06 +00:00
5996356b3e 2024-06-20 nightly release (19f3abcde45f686e3d270d41c2c919d7598cec19) 2024-06-20 07:33:34 +00:00
505ff10e75 2024-06-19 nightly release (99f042d336b53844b509406f1ecf78cb6f5e5714) 2024-06-19 07:33:50 +00:00
23e34b0716 2024-06-18 nightly release (c017c97333dfb9d17f2e5357980241827e50e8d5) 2024-06-18 07:33:38 +00:00
8410bf5cd1 2024-06-17 nightly release (f9dae86222aaf15ea085c7774da70781bae46ff9) 2024-06-17 07:33:46 +00:00
db04d3cd9e 2024-06-16 nightly release (6079c5091091d872b8dafbaa4e31a5b6194647ad) 2024-06-16 07:33:35 +00:00
d69902da7e 2024-06-15 nightly release (2d01f877373d3255fa8b77c714c2ca17d08e6126) 2024-06-15 07:33:36 +00:00
3d45df9a51 2024-06-14 nightly release (8c20f53a5e24209624670ecd39841bd71f38d11d) 2024-06-14 07:33:45 +00:00
4ecd3982af 2024-06-13 nightly release (d630e1e838e7416b6e27572a91407c4447a00734) 2024-06-13 07:34:39 +00:00
b5335bf346 2024-06-12 nightly release (86b5df3e71e6b786347ee5fa69daa054849bea2e) 2024-06-12 07:33:44 +00:00
c8e5a721d0 2024-06-11 nightly release (734e8f6ad7e7f0fa0341fb658f1f986225173f5f) 2024-06-11 07:33:40 +00:00
481ceefe2a 2024-06-10 nightly release (df43d5843edd9abd95d2c039670bce51375d9c06) 2024-06-10 07:33:37 +00:00
963d450511 2024-06-09 nightly release (f681e3689b857b8811f19d60d439bfb3fb2dd2d3) 2024-06-09 07:33:33 +00:00
6a855e6923 2024-06-08 nightly release (6e5c2a1a3bc9507ec459f3e01f5e492d8bef122a) 2024-06-08 07:33:39 +00:00
9f88f70e2e 2024-06-07 nightly release (65aa16f968af2cd18ff8c25cc657e7abda594bfc) 2024-06-07 07:33:43 +00:00
529bedad3e 2024-06-06 nightly release (ffaea656b5d8ff6518669494cc8f664b94f8e8b1) 2024-06-06 07:33:42 +00:00
d3551df158 2024-06-05 nightly release (3bcc3cddb580bf0f0f1958cfe27001f236eac2c1) 2024-06-05 07:33:52 +00:00
b94ddab65b 2024-06-04 nightly release (1208347d0912d1236ae43257e2914767e35c3b36) 2024-06-04 07:33:37 +00:00
1b6f0e0d5f 2024-06-03 nightly release (7e97b33fbbb24fa5876500be764c97f51b74ac0e) 2024-06-03 07:33:39 +00:00
074e4f1c80 2024-06-02 nightly release (c19ad112f65ab88af3eeb319dda0675c076b0b67) 2024-06-02 07:33:38 +00:00
70b03a1d35 2024-06-01 nightly release (121c55d8d12a878b12eab00a7cebae2e2fa47ee7) 2024-06-01 07:33:43 +00:00
4d012a7a2f 2024-05-31 nightly release (029af29e6d2a91bc8a6e15d445fec42a49c0454e) 2024-05-31 07:34:17 +00:00
0bbe39cc0e 2024-05-30 nightly release (d66f12674cfe0151a86dc10b8de216f83bf42e6e) 2024-05-30 07:34:02 +00:00
c35e8711f4 2024-05-29 nightly release (0ff2f8b52248323bbe25108b64e706c43390cb72) 2024-05-29 07:33:53 +00:00
4fbc2ec72d 2024-05-28 nightly release (0ff2f8b52248323bbe25108b64e706c43390cb72) 2024-05-28 07:33:37 +00:00
2995cdef9a 2024-05-27 nightly release (6aa5bb1a76dee8112f1a9e7c194c790b5cdc6462) 2024-05-27 07:33:37 +00:00
549167fcb3 2024-05-26 nightly release (ee6cb6daa173896f8ea1876266a19775aaa4f610) 2024-05-26 07:33:37 +00:00
99b987f154 2024-05-25 nightly release (ed838793dfb530273fec30ea64954333672794e3) 2024-05-25 07:33:32 +00:00
269ad139c7 2024-05-24 nightly release (cb6ef68caa22c1a2f7a4e8583c0e7c923c8bfd17) 2024-05-24 07:34:51 +00:00
534d4bbde2 2024-05-23 nightly release (4f14282e350a6c9a0a280083c286f7d672fa3ebd) 2024-05-23 07:33:35 +00:00
1aa713bf08 2024-05-22 nightly release (608a11c4963dbba86189286da4a0aa58c0779126) 2024-05-22 07:33:51 +00:00
3149d44669 2024-05-21 nightly release (31ba6ee49bdbd7dd6c2a3c77ad31cc46aec2049e) 2024-05-21 07:33:38 +00:00
2a8ecff0c9 2024-05-20 nightly release (5fb11cda4fe60c1a7b30e6c844f84ce8933ef953) 2024-05-20 07:33:39 +00:00
bca6d8b2fe 2024-05-19 nightly release (71b6459edc19dac02b6e559977a0f615bf451210) 2024-05-19 07:33:36 +00:00
b138ead39a 2024-05-18 nightly release (d4704dcacc543023e7a746263a970bef155d58c4) 2024-05-18 07:33:43 +00:00
7fa9edf793 2024-05-17 nightly release (da1fc85d60fcf0bd1e8638d643a7c0c6560c3a5f) 2024-05-17 07:33:44 +00:00
a86434a3ca 2024-05-16 nightly release (315389bfed3a0b29752997862e18681169ebd501) 2024-05-16 07:33:42 +00:00
e5cd7bea02 2024-05-15 nightly release (1485621ccbb5367936ba6da51ba896387416c353) 2024-05-15 07:33:42 +00:00
8ea7ef39bc 2024-05-14 nightly release (9a2beb862d9c30f037c9b2eac878ec3f9227a5e2) 2024-05-14 07:33:54 +00:00
708a49f3e2 2024-05-13 nightly release (1c3fe8403365db3cc9b75524ae742e3027b745e2) 2024-05-13 07:33:36 +00:00
2de26f12a8 2024-05-12 nightly release (02093b6c6ae1046368e2500881d0bb5880873386) 2024-05-12 07:33:45 +00:00
d4d98b914c 2024-05-11 nightly release (c1690a3e12dd4a748af9827e84dbe8fa5273ee51) 2024-05-11 07:33:39 +00:00
8d3ad715c8 2024-05-10 nightly release (fcbf2b61e6f40048ef0e6d77360c86771956cc9c) 2024-05-10 07:33:33 +00:00
8f863f5efd 2024-05-09 nightly release (ff090c693751f564f45951f1b13c11478892799d) 2024-05-09 07:33:46 +00:00
e1c37ce2c1 2024-05-08 nightly release (50073127b5e49b2b75a912d73a70ecb61890a32d) 2024-05-08 07:34:30 +00:00
e4f522910a 2024-05-07 nightly release (48b6c8dbc376db4406a979b35cd6909bcb428931) 2024-05-07 07:33:24 +00:00
3e7378d00b 2024-05-06 nightly release (fc183f0bdec30baa6686e13720adae077c332bdd) 2024-05-06 07:33:40 +00:00
1b7523fbe9 2024-05-05 nightly release (6d30803d64953955df63da56833bf4eb52249aae) 2024-05-05 07:33:44 +00:00
48aefeb4a4 2024-05-04 nightly release (d4727fd4ebd42936a1bae7b7f44ee9a038fd643e) 2024-05-04 07:33:42 +00:00
5969fe4abd 2024-05-03 nightly release (a40d6df448de1acb263ed8f6ff9e7d26f5a1a161) 2024-05-03 07:33:33 +00:00
267a1102f8 2024-05-02 nightly release (b03fb49ed8d812ceba3741432ac1f9de3bd1650e) 2024-05-02 07:33:34 +00:00
01c419df43 2024-05-01 nightly release (1c905f1be34683480a33e36104b8482442d7c1fd) 2024-05-01 07:33:43 +00:00
dca7af5889 2024-04-30 nightly release (e7846447e03d44baf4437cf122fb6e70b0c1d472) 2024-04-30 07:33:36 +00:00
afb4c766d2 2024-04-29 nightly release (cc06c00a56a78e3ecdc1b5618e1af4a1a2be7cd5) 2024-04-29 07:33:40 +00:00
34ade3521c 2024-04-28 nightly release (7478b7f1cac9686f00edf3db4667cf86d2421531) 2024-04-28 07:33:44 +00:00
9179db815b 2024-04-27 nightly release (5944a5355501cb45fac966d3cf4277cbc718eff1) 2024-04-27 07:33:41 +00:00
38547e9613 2024-04-26 nightly release (769b1e6cdca56ee91a61ee6b1502a9faa1819106) 2024-04-26 07:33:36 +00:00
734a000f16 2024-04-25 nightly release (13ab24f1923359cdfe795a32cdd3d753f940efbe) 2024-04-25 07:33:48 +00:00
c5b898e77e 2024-04-24 nightly release (b91f83f18139a6ed2626c30979a38e533d4c7d7c) 2024-04-24 07:33:44 +00:00
6d1c678c58 2024-04-23 nightly release (7706cd7d12781b8c537dd045745738d60c6c31f1) 2024-04-23 07:34:04 +00:00
0a3e5f5bad 2024-04-22 nightly release (b1984237a0fb32b760c1b84d6d02d2f0f7ed293b) 2024-04-22 07:33:34 +00:00
02b1ebb3dd 2024-04-21 nightly release (bad8d25881d850eaf0b326f6ce5c78305e38c001) 2024-04-21 07:33:29 +00:00
db26423735 2024-04-20 nightly release (bad8d25881d850eaf0b326f6ce5c78305e38c001) 2024-04-20 07:33:37 +00:00
ee5e704c78 2024-04-20 nightly release (bad8d25881d850eaf0b326f6ce5c78305e38c001) 2024-04-20 03:06:07 +00:00
e0c77fb573 2024-04-19 nightly release (c9db59e9e4d425f9d4e9f55247888c24b0d638e8) 2024-04-19 18:17:20 +00:00
dc9eea4365 2024-04-19 nightly release (4946638f06e5916ea9bd0f790ff620bdb78a92a3) 2024-04-19 07:33:36 +00:00
0a0cedaf40 2024-04-18 nightly release (9385ef2a5dbd62cb877e863c91ff29a43c340456) 2024-04-18 19:38:08 +00:00
d8b31eb548 2024-04-18 nightly release (14162eecfcb5cc11139260c034c653e972a9073a) 2024-04-18 07:33:30 +00:00
449c28cb0d 2024-04-17 nightly release (dd3cea3291346e66a870bbec51e4d1a3550300db) 2024-04-17 07:33:43 +00:00
871442ef61 2024-04-16 nightly release (67bd43b5106bed6365f1986229d7c32079653e76) 2024-04-16 21:06:16 +00:00
0b80497b9d 2024-04-16 nightly release (e4efa311f1c692813befc45142d668f35a66392b) 2024-04-16 07:34:03 +00:00
42cbe0248e 2024-04-15 nightly release (19f50333e91e9e8b20a78517becd74bca70c7d46) 2024-04-15 07:33:32 +00:00
e275bb87c8 2024-04-14 nightly release (88a71594933b2464d9d8b6b3533c5a945a4ac2ff) 2024-04-14 08:03:45 +00:00
99d3d33091 2024-04-13 nightly release (7e3f80f00f3995e335bd6313b8c4be998cc4e2cd) 2024-04-13 07:33:52 +00:00
19db4687de 2024-04-12 nightly release (868e5ced5df34f1aef3703654f76e03f5126b534) 2024-04-12 07:33:35 +00:00
ad9f88b232 2024-04-11 nightly release (793df52dc52f5f5f657744abfd7681eaba7a21f9) 2024-04-11 07:34:32 +00:00
4dae33d814 2024-04-10 nightly release (65710d95c9adcc166e4d4a19ac16b8d7516fc87b) 2024-04-10 07:33:43 +00:00
74f1665940 2024-04-09 nightly release (d8e0c26e642bf8e44cc6e8d46822d54957b1c660) 2024-04-09 07:33:36 +00:00
a3fc530d82 2024-04-08 nightly release (bb04f3f66a5b92f0bed3712689f57774f00db349) 2024-04-08 07:33:35 +00:00
903938e95d 2024-04-07 nightly release (e3ea31662334770bbbb6da4abd881abc875e04c3) 2024-04-07 07:33:34 +00:00
a4ae0b67c2 2024-04-06 nightly release (22b99871442fc3c78ec173894c017d6d98996501) 2024-04-06 07:33:35 +00:00
8f9cba17bb 2024-04-05 nightly release (d8717c2d68bc0a8bda7e8ad823a0d6c2d8aae934) 2024-04-05 07:33:29 +00:00
3d14e74ccb 2024-04-04 nightly release (669462817071d1badd88a832a7c38b9bafde2233) 2024-04-04 07:33:48 +00:00
9da0ebeb73 2024-04-03 nightly release (15bd81bfafa86fec9d675e7f071c867c852ebe8f) 2024-04-03 07:33:51 +00:00
16261ef1eb 2024-04-02 nightly release (4d5cdc2e1e62ec400f4355cb9d3b0d2b5af0fbf8) 2024-04-02 07:33:38 +00:00
e1c8416590 2024-04-01 nightly release (781e8d2201c1e2aaeccbbc7b7b13f9322b481bc9) 2024-04-01 07:33:45 +00:00
4af23dd2d2 2024-03-31 nightly release (35c493f2cf9b623bfdc7e6b34dc1cb39690a7919) 2024-03-31 07:33:39 +00:00
70a3132121 2024-03-30 nightly release (f4ff063c333f286d4384523bac67c047aca4d7b9) 2024-03-30 07:33:42 +00:00
2f5e4e1df7 2024-03-29 nightly release (3beb9d85a6501d8df5a89e2cc2d7cb0f8f94dbfc) 2024-03-29 07:33:31 +00:00
096c0ed5e4 2024-03-28 nightly release (12116aee6852df2b040255b8fcc7deb52b897792) 2024-03-28 07:34:33 +00:00
384cbf2346 2024-03-27 nightly release (f2c1060de3cdddbfefcab11e547211993d0f9cfa) 2024-03-27 07:33:38 +00:00
a166d4b816 2024-03-26 nightly release (5e66bf5f42104bc60a8e77b51c84a4cce81efdd8) 2024-03-26 07:33:52 +00:00
c18fcd604f 2024-03-25 nightly release (deeeaded1ff45d0e1e666e42cadf2026594b6426) 2024-03-25 07:33:31 +00:00
848a2e8f52 2024-03-24 nightly release (f0d461beacded34abe196c72ec4bcdb55bf01793) 2024-03-24 07:33:29 +00:00
bbafd43d86 2024-03-23 nightly release (c677221798d8ce87c97aac1bd9ae34af0767c383) 2024-03-23 07:33:27 +00:00
9efd8fed32 2024-03-22 nightly release (ef0d470eb372a5e68f6332bea9b1df26fca28b59) 2024-03-22 07:33:31 +00:00
cfaed59ce7 2024-03-21 nightly release (cc0cadaf4c76c91d19474a1a512c9bc31e2c8602) 2024-03-21 07:33:42 +00:00
90fdee15be 2024-03-20 nightly release (6b5259e50704aede43c87fed33f64224f9047087) 2024-03-20 07:33:56 +00:00
0e66636599 2024-03-19 nightly release (5bc7f7f97760d2d485621f9f30d0316e1f2440c6) 2024-03-19 07:33:41 +00:00
c823f7a4de 2024-03-18 nightly release (7380585d9755eaef95bec8994a12e1fc6510f81b) 2024-03-18 07:33:33 +00:00
ad27e5012d 2024-03-17 nightly release (6f74b7607207054c1f5331183ab725640c99fe8d) 2024-03-17 07:33:37 +00:00
c5774a585f 2024-03-16 nightly release (f0b9a8344a38d2e865d08f084138ac2911a66ad1) 2024-03-16 07:34:28 +00:00
285db4990f 2024-03-15 nightly release (e4fda049c2bfe5a2e3aa0228a3792de87b4f670b) 2024-03-15 07:33:35 +00:00
8ef78d58d3 2024-03-14 nightly release (5b90074540577267c29f5f784be123ee54f6491d) 2024-03-14 07:33:44 +00:00
2447af7e49 2024-03-13 nightly release (9f235971f02e0d53038f5bcef9b7018be2ac8c6d) 2024-03-13 07:34:36 +00:00
a9e4e9b51d 2024-03-12 nightly release (6ca9ae4f8693639c395544327f7e362441a58c79) 2024-03-12 07:33:34 +00:00
1a6ae7b963 2024-03-11 nightly release (f11f2b0d55b1aa322f73f4bb521beaf9d4563603) 2024-03-11 07:33:34 +00:00
715d399539 2024-03-10 nightly release (41286f1505ffb214d386d72e4b72ebd680a4a475) 2024-03-10 07:33:30 +00:00
ffcaab7876 2024-03-09 nightly release (d482614fec5fb9bccb49bf4ee4ab561e872c0f50) 2024-03-09 07:33:47 +00:00
55e9301321 2024-03-08 nightly release (953c6c37cbfcc894963120666dbf066977931885) 2024-03-08 07:33:57 +00:00
ce01333322 2024-03-07 nightly release (a2854ae904c55ebf19031ca6e883e5dcbb6a4a9a) 2024-03-07 07:33:32 +00:00
c992f0fc38 2024-03-06 nightly release (dad1b765848c4f52501c4c60b1c3e6fbd3cc8837) 2024-03-06 07:33:41 +00:00
65d5412d00 2024-03-05 nightly release (6566b3db677a6d3f2f90a3fcd4dd1dd2d9d0bfc4) 2024-03-05 07:33:46 +00:00
c652a65065 2024-03-04 nightly release (581fe26792849a80f04feaa7f8bdec69b7f41dd8) 2024-03-04 07:51:50 +00:00
0586ca040c 2024-03-03 nightly release (581fe26792849a80f04feaa7f8bdec69b7f41dd8) 2024-03-03 07:33:40 +00:00
de87c0a689 2024-03-02 nightly release (581fe26792849a80f04feaa7f8bdec69b7f41dd8) 2024-03-02 07:33:26 +00:00
4be1e6af8b 2024-03-01 nightly release (2e84d01d0576a31a6387e4ccc195f11449fb875a) 2024-03-01 07:34:16 +00:00
574ad2898e 2024-02-29 nightly release (f5b99976adcbb01fd71bd0a39ea15bdac6c9e48a) 2024-02-29 07:33:43 +00:00
c019093955 2024-02-28 nightly release (9c597ff137ead9f7f7ec8fdcbf473de2d328e61b) 2024-02-28 07:33:41 +00:00
738f032fb9 2024-02-27 nightly release (a1c641f11809c1f04e1027086f44b1b8f8427dd9) 2024-02-27 07:33:39 +00:00
29e2492945 2024-02-26 nightly release (834c7a1d3ea07878ad87d127ee28606fc140b552) 2024-02-26 07:34:44 +00:00
53f5035912 2024-02-25 nightly release (5c7b761f8e748fe45c8e2e29563df637ae651917) 2024-02-25 07:33:22 +00:00
ea9f12d7d1 2024-02-24 nightly release (79f059987eead42b9279e31e28ed6fd697630d36) 2024-02-24 07:33:36 +00:00
d25357d14e 2024-02-23 nightly release (36c1cc962aaef854d2388a5ecfde230d40bcc1d6) 2024-02-23 07:33:36 +00:00
f8fa3bf4c8 2024-02-22 nightly release (fff9d98e58cda4ad67fef6d0739f684ca7d4bbec) 2024-02-22 07:33:34 +00:00
fe8007dad8 2024-02-21 nightly release (becfda005e524f93b1efed64917a129ef6778135) 2024-02-21 07:33:33 +00:00
8efa066dc0 2024-02-20 nightly release (becfda005e524f93b1efed64917a129ef6778135) 2024-02-20 07:33:29 +00:00
b57318ddcc 2024-02-19 nightly release (becfda005e524f93b1efed64917a129ef6778135) 2024-02-19 07:33:31 +00:00
672f086ea7 2024-02-18 nightly release (becfda005e524f93b1efed64917a129ef6778135) 2024-02-18 07:33:26 +00:00
d70e8ef6c8 2024-02-17 nightly release (becfda005e524f93b1efed64917a129ef6778135) 2024-02-17 07:33:36 +00:00
0747ef0bfb 2024-02-16 nightly release (62e5840b36b6790a5008faa76a920c6d154897b9) 2024-02-16 07:33:25 +00:00
04a9a7cd1a 2024-02-15 nightly release (cd08dc37f831f7b7deabdc92893758de84c4d3d9) 2024-02-15 07:33:42 +00:00
9debfae3d5 2024-02-14 nightly release (16369816a2e1c298fe5d15a439e922d10eb1894b) 2024-02-14 07:33:29 +00:00
98d4762774 2024-02-13 nightly release (2502a011108b0592dd6dd34e5a61b2d52b95160e) 2024-02-13 07:33:27 +00:00
633abee414 2024-02-12 nightly release (dcb08a7044d986ceecd7252fdb2dd0e10501587a) 2024-02-12 07:33:37 +00:00
fede3c1a2f 2024-02-11 nightly release (3ab08946d5052eaeda11d683d6a58e801a032755) 2024-02-11 07:33:40 +00:00
8f9cff839c 2024-02-10 nightly release (42ca82dfb1eb133b287b881c346a0032c1c2a08e) 2024-02-10 07:33:28 +00:00
1396090c1f 2024-02-09 nightly release (930b60f5aa7e17402491900a545d96822796b638) 2024-02-09 07:33:28 +00:00
0b66915bb5 2024-02-08 nightly release (5f6b35915af98fac741998edb55aeeba4aaa3794) 2024-02-08 07:33:36 +00:00
3cbc8e89fd 2024-02-07 nightly release (d0ca849fdf9807e730f91ce8e86d126b241e0940) 2024-02-07 07:33:40 +00:00
7f3f3c3d68 2024-02-06 nightly release (12ac3ba383af99733ec23fcd53c7e29f70b68371) 2024-02-06 07:33:48 +00:00
e795fa298a 2024-02-05 nightly release (b8307513e57f8beaf99daff342a23d705a417e11) 2024-02-05 07:33:36 +00:00
68d3fec0cb 2024-02-04 nightly release (8f82a44a5beed0f205a7f273b9f062da43d65679) 2024-02-04 07:33:30 +00:00
6517e8e586 2024-02-03 nightly release (ce40ee8ecdfdecd48ffcec4fced35f16a77f268d) 2024-02-03 07:33:21 +00:00
aaafe2673d 2024-02-02 nightly release (08472a4fd55881260cbad8a15260528e6c4bee04) 2024-02-02 07:34:01 +00:00
671f5d2d7f 2024-02-01 nightly release (4cba1dd0c3e7f625856566ae00d89d8ee2e8b990) 2024-02-01 07:33:32 +00:00
e3133a2234 2024-01-31 nightly release (e3cde685340d2c5c752428b37449ba75f59488af) 2024-01-31 07:33:40 +00:00
1e875b454f 2024-01-30 nightly release (dcc077eea2f95a9d5caa65eeca4d8c73e207f61d) 2024-01-30 07:34:34 +00:00
4e2aa5dbb8 2024-01-29 nightly release (890d8e66925ff7bb1b765087ad921ebc1bdebf48) 2024-01-29 07:33:38 +00:00
4749337721 2024-01-28 nightly release (2ed0af2bdeb2a5e0df78b22eab04d468f8db239e) 2024-01-28 07:33:22 +00:00
8f8d7de630 2024-01-27 nightly release (fe10b1800f5df432532fc91e0d553a521ec53479) 2024-01-27 07:33:29 +00:00
b12247c7ad 2024-01-26 nightly release (0c5da6100f5426173a64da5873fb565553c8510e) 2024-01-26 07:33:42 +00:00
86adc78cb3 2024-01-25 nightly release (2d7a360911fb7b27be82c51ca86b4b34b6f1b087) 2024-01-25 07:33:46 +00:00
54fee3b5dd 2024-01-24 nightly release (26968cefb0f0ddf31650015c95bbc11e8eaf88ec) 2024-01-24 07:33:42 +00:00
01a5c71e1d 2024-01-23 nightly release (5ec2d7959d1ef1e8eeacc5e59bbf0f8b2dda1ea6) 2024-01-23 22:34:47 +00:00
6458e8cbc2 2024-01-23 nightly release (3708f2608e932b588ae7d73788b969cf83ece8e8) 2024-01-23 07:33:38 +00:00
72fcb9ad66 2024-01-22 nightly release (39df084001c54cca5fe3174176f9b0206ddb7dcf) 2024-01-22 07:33:42 +00:00
4f9be7c2d0 2024-01-21 nightly release (949a76a7f001cc4ec0d8e14071d264f35b5f8a8a) 2024-01-21 07:33:40 +00:00
ffb3d30801 2024-01-20 nightly release (15d568d62142f207734c29a370d6e7ce0e7753de) 2024-01-20 07:33:32 +00:00
5a2ff3fc7c 2024-01-19 nightly release (f302a0d380cf2a6e8c2ae7e67c6859503a038cf7) 2024-01-19 07:33:38 +00:00
2b31a65125 2024-01-18 nightly release (02c96f69493ebd06d940a778a2d3d89c660a7ce4) 2024-01-18 07:33:35 +00:00
7b9dec1bfc 2024-01-17 nightly release (735715e6d31fb4356cc203824e0600b5f5dcb433) 2024-01-17 07:33:39 +00:00
24569d3416 2024-01-16 nightly release (28bb31e4a588f3fa198e927439fe09c871201aab) 2024-01-16 07:33:45 +00:00
cb0c26e8d3 2024-01-15 nightly release (19502ff6aaba0bce823989783d01b996469482f9) 2024-01-15 07:33:37 +00:00
6c531039fa 2024-01-14 nightly release (38c18f382577ee4a5587fd1876d2e2b546d004b8) 2024-01-14 07:33:30 +00:00
aa962d8943 2024-01-13 nightly release (96163eb010d233f97f114257ac1066def0164769) 2024-01-13 07:33:29 +00:00
ff10016dc5 2024-01-12 nightly release (b62ba82cdc2123d5542b18c035d52d28aee5c01a) 2024-01-12 07:33:40 +00:00
1beb432db5 2024-01-11 nightly release (ca23c56efcda3a0037252272a88b78936a5a9187) 2024-01-11 07:34:37 +00:00
f8e4c22ee2 2024-01-10 nightly release (8bcdde5058658cc193c94a7f1eb16660553dc35a) 2024-01-10 07:33:36 +00:00
03969cb2d2 2024-01-09 nightly release (428807f9bcd9bb32e0369c6b9c6f12fa4a4080e9) 2024-01-09 07:33:44 +00:00
da46ac2bbe 2024-01-08 nightly release (57491d2046b7ddcdf5588dea3cc1dee6c1eb366b) 2024-01-08 07:33:33 +00:00
2f05263756 2024-01-07 nightly release (95041829c8a76755d22df9bd711f25781e57e223) 2024-01-07 07:33:39 +00:00
98e5582aab 2024-01-06 nightly release (ff0f79d3c7aff0f9dd83b3a48d09a61ac8e82621) 2024-01-06 07:34:01 +00:00
f1655ac7ae 2024-01-05 nightly release (39f885331394e6baea6220f7e22bf1cda17bc361) 2024-01-05 07:33:34 +00:00
b99446db6b 2024-01-04 nightly release (e2359f72c82a14a1e2c6f036528fa9418227eba3) 2024-01-04 07:33:30 +00:00
804969c7ad 2024-01-03 nightly release (ca4df16fddc79b816021dc863f26056559dcf5bc) 2024-01-03 07:33:41 +00:00
a87500edd6 2024-01-02 nightly release (1ed8efa9b35328df6f0e8dbfd27578dcfde58e60) 2024-01-02 07:33:36 +00:00
7003edfbb4 2024-01-01 nightly release (fc5fda14bcc41008e5610bf39c53eb066933ea4e) 2024-01-01 07:33:30 +00:00
b41958165d 2023-12-31 nightly release (4bfaa6bc250f5ff5702703ea237f578a15bbe3b6) 2023-12-31 07:33:41 +00:00
534a8bd9e7 2023-12-30 nightly release (97891b184c12763f335fbe1ff63fab843edafab5) 2023-12-30 07:33:34 +00:00
7d163a9861 2023-12-29 nightly release (ef94499ad7ff40fee8aa230d71cd109570b74405) 2023-12-29 07:33:30 +00:00
ef28de986b 2023-12-28 nightly release (6375eb15efa1273514695e468f68963feedc0067) 2023-12-28 07:33:31 +00:00
50f7a03676 2023-12-27 nightly release (9c3ae37fc453505f5e437d1edadefdb278c2c39c) 2023-12-27 07:33:33 +00:00
eb8dedaa36 2023-12-26 nightly release (f6dfbffb3bb46ada6fe66b5da4f989f9d4d69b3c) 2023-12-26 07:33:46 +00:00
b68f93b094 2023-12-25 nightly release (36dccc2aba61a2637aa5d42f38b6fd1fe10dcbdc) 2023-12-25 07:33:40 +00:00
1ed8e26791 2023-12-24 nightly release (6c419a0efdcfa8e262582795144102051b527f62) 2023-12-24 07:33:26 +00:00
c70de4e370 2023-12-23 nightly release (84b2a323594bc7c4b47d61223b3f6466fe054416) 2023-12-23 07:33:50 +00:00
b6723b1317 2023-12-22 nightly release (b47aa69685aa6e561d3ba0edaff84b7833b98a82) 2023-12-22 07:33:29 +00:00
9a70b79cdc 2023-12-21 nightly release (f2c1fb3ee40012ab17a0548dcbff509d6b1dbf39) 2023-12-21 07:37:07 +00:00
1ee7950847 2023-12-20 nightly release (3747aca49a39479c2c5e223b91369db5bd339cdf) 2023-12-20 07:33:30 +00:00
ba4c18f527 2023-12-19 nightly release (9dda4b20a0dcc4786a130c12fc3e4a6a1ecf23d8) 2023-12-19 07:33:30 +00:00
ad2706c6cd 2023-12-18 nightly release (87ea6fb84471c09795672de542622515f81af3d7) 2023-12-18 07:33:27 +00:00
a2940cd437 2023-12-17 nightly release (87ea6fb84471c09795672de542622515f81af3d7) 2023-12-17 07:33:25 +00:00
8bea527b1a 2023-12-16 nightly release (e3aefe297050e197ff3f36f555a134a0e61ffc9c) 2023-12-16 07:33:34 +00:00
a846296964 2023-12-15 nightly release (9056903b098bb9190aa7d08caf5dbf549c5ada3a) 2023-12-15 07:33:35 +00:00
41dd4009c9 2023-12-14 nightly release (f5458f8f00006e7ba25731de42d651c808ccd8eb) 2023-12-14 07:33:49 +00:00
f13a21cce5 2023-12-13 nightly release (0dad85b402c2068c76a61d7c1ddcb60767ca1ef7) 2023-12-13 07:33:46 +00:00
935bfdac27 2023-12-12 nightly release (fbb744fd496c9ee85bcb68e2ba956753e61f6f88) 2023-12-12 07:33:32 +00:00
2cf75d28fe 2023-12-11 nightly release (fe01605830145b5aa204120b90361021a2952ac1) 2023-12-11 07:33:52 +00:00
5da0a5dcb6 2023-12-10 nightly release (4eb5838e184d7f5a795498d32bdde17f0088350f) 2023-12-10 07:33:41 +00:00
7cbcaabb5f 2023-12-09 nightly release (2ed47fecc5b9c526be9cc3c9bab93ca1fdb81547) 2023-12-09 07:33:30 +00:00
742864de52 2023-12-08 nightly release (9521331ba5d081a27b92eea50e42ed5da3dd973c) 2023-12-08 07:33:40 +00:00
424d4406cb 2023-12-07 nightly release (7ec145bfed9813599a0a9dcacbc7aa881626af79) 2023-12-07 07:33:47 +00:00
01557064c0 2023-12-06 nightly release (a548e805364b01554c5e48553f6c779d0973f498) 2023-12-06 07:33:55 +00:00
f87b94dcdc 2023-12-05 nightly release (7843df60e41f856edb148bbcbb5b9aee8292db74) 2023-12-05 07:33:43 +00:00
a111e45dfe 2023-12-04 nightly release (3fbfa8cd0a5cefadb3f116c5cd0d60e96ab8c99e) 2023-12-04 07:33:35 +00:00
6fc9d89863 2023-12-03 nightly release (3a2e2044cd36c1c300039aa915863b3e74df396b) 2023-12-03 07:33:48 +00:00
d2edc8ce7c 2023-12-02 nightly release (0fef82b3dfb4a5da1555a3c046950594b83e2898) 2023-12-02 07:33:24 +00:00
56a830e765 2023-12-01 nightly release (9267ab903200ed2ba548de3fa8a7e59a972cf68c) 2023-12-01 07:33:33 +00:00
ef73b5c9a8 2023-11-30 nightly release (f1fe0b685c4b93a684f0461a7a8f331e9dd17506) 2023-11-30 07:33:31 +00:00
95cc39e7d6 2023-11-29 nightly release (48820c928ce1822565821db3e73d4ee79a2508a1) 2023-11-29 07:33:54 +00:00
5ae273f939 2023-11-28 nightly release (11f11e95df9c205d427fe4dd7e63c9adb91ea03f) 2023-11-28 07:33:39 +00:00
b2f25d6342 2023-11-27 nightly release (624f2025229312bffd725775760dc9b147cc32d8) 2023-11-27 07:33:45 +00:00
df0c1b3a93 2023-11-26 nightly release (d37c4c69954ad7bdccca96854105c48e93d4587e) 2023-11-26 07:33:34 +00:00
794fd233b4 2023-11-25 nightly release (0f5e24bda9450a89ba56d2fdd471f56d97fe4546) 2023-11-25 07:33:36 +00:00
88ca0cc13d 2023-11-24 nightly release (dad3cc4d026794ad3ec309f7ae38e9c3798ce34f) 2023-11-24 07:33:31 +00:00
7f6282544f 2023-11-23 nightly release (b27565ad7d07baed98df50b05f108e209a6f4755) 2023-11-23 07:33:39 +00:00
3164682468 2023-11-22 nightly release (e0ec71deab2aedd6d44f4ea3e03b52bdaf5db3da) 2023-11-22 07:33:47 +00:00
8b7f52e025 2023-11-21 nightly release (18e1a37c4e637370ad3ceee91cecffcfeb4b79b6) 2023-11-21 07:33:34 +00:00
53233b86d5 2023-11-20 nightly release (7963aaac4157c961c1d54033d1f2515d0c987360) 2023-11-20 07:33:50 +00:00
6f045ad045 2023-11-19 nightly release (870539670aff386d66b190b4749316e21d2ae56e) 2023-11-19 07:33:31 +00:00
07c9b95f45 2023-11-18 nightly release (5e307417548374bb40d754518d324efa5359e8aa) 2023-11-18 07:33:37 +00:00
0603dcb449 2023-11-17 nightly release (e8ee14292e83f065c9baf0530de17759134091ee) 2023-11-17 07:33:49 +00:00
690c805c8b 2023-11-16 nightly release (a3b859fc673515f74d103494e61b432c9cc9009f) 2023-11-16 07:33:45 +00:00
a5a404865c 2023-11-15 nightly release (6f4409073f9fad32016f18b3b083f959d7600a2b) 2023-11-15 07:34:19 +00:00
ec2f8fd2f1 2023-11-14 nightly release (ffc3731dc477c92c1daa764f55bab1692c6a6361) 2023-11-14 07:33:40 +00:00
a45a8bf9e7 2023-11-13 nightly release (ef49f61f1909c2feb717023a6b55d87ac9c70a04) 2023-11-13 07:33:54 +00:00
63a5a14da9 2023-11-12 nightly release (ef49f61f1909c2feb717023a6b55d87ac9c70a04) 2023-11-12 07:34:05 +00:00
f40306d6c4 2023-11-11 nightly release (ef49f61f1909c2feb717023a6b55d87ac9c70a04) 2023-11-11 07:33:22 +00:00
edbf22fa03 2023-11-10 nightly release (265d6aac0b71b917d6e36c5dd65c22f61644b715) 2023-11-10 07:33:54 +00:00
2c3ba6926e 2023-11-09 nightly release (fbf7866ac98258a8130e14de3021d3160bc28af3) 2023-11-09 07:33:27 +00:00
4060c20449 2023-11-08 nightly release (aa376e31fdc8aacc6bfe1de5f6bff1d5f4fad136) 2023-11-08 07:33:33 +00:00
291abc63c0 2023-11-07 nightly release (10a829b85d721a76ba436f2416d2d802aab6e9e0) 2023-11-07 07:33:58 +00:00
a04dd794ad 2023-11-06 nightly release (bd9be877e4459d7889e5e3c8051caaffdfe21c85) 2023-11-06 07:34:04 +00:00
2d7dd2e800 2023-11-05 nightly release (64f326097be8ac66ff057365f3bed2d64c697563) 2023-11-05 07:33:37 +00:00
a89fef7184 2023-11-04 nightly release (fb044e2b1783854da105c7166becb8cc37ac96c4) 2023-11-04 07:34:00 +00:00
fd56aa0dae 2023-11-03 nightly release (4a17693d193a8b23109ac3129e5cde92babfe191) 2023-11-03 07:34:11 +00:00
d2c3aa1fde 2023-11-02 nightly release (59869903b3a54acb3ac3a93470fc595d1de3fa7a) 2023-11-02 07:33:53 +00:00
e844d7ab04 2023-11-01 nightly release (748c1a1d81400cbb067c0e15c3769e137cce57f4) 2023-11-01 07:33:58 +00:00
7d93922ea8 2023-10-31 nightly release (2f09da3a21a1a49425424715ea3b3f4131392334) 2023-10-31 07:33:50 +00:00
07cc1132a9 2023-10-30 nightly release (919c9b713e65aa1489123259a0ce7e79322d4d9d) 2023-10-30 07:33:51 +00:00
0a16ad0f88 2023-10-29 nightly release (f5088d2e45787094e201fe82677c399ce456ef71) 2023-10-29 07:33:46 +00:00
c0e43d4625 2023-10-28 nightly release (31f605344f255faaef0aede0f98483f4cb09f072) 2023-10-28 07:33:53 +00:00
525a926aaa 2023-10-27 nightly release (c84dbd2c0393fed3e3d98030c85c30fbbfdc7c3e) 2023-10-27 07:34:11 +00:00
bf9fd89f63 2023-10-26 nightly release (2aaa7e542cb3b217e19284d10d0c0ae1e2df202b) 2023-10-26 07:34:07 +00:00
e6efc2915b 2023-10-25 nightly release (6e2dfb360b310a8676e96caa6550be083d72bd96) 2023-10-25 07:34:13 +00:00
a422515e75 2023-10-24 nightly release (0e0f6a248db4ff054655ea405211dddc8e7ab817) 2023-10-24 07:34:06 +00:00
88eb6bbb1a 2023-10-23 nightly release (c65c0682b19ed479ea66653fc0457958a3e2c59c) 2023-10-23 07:33:55 +00:00
f468e74875 2023-10-22 nightly release (bf01a7b023c1ce4deb6316d25eadc1791807e88d) 2023-10-22 07:33:53 +00:00
147ac6b312 2023-10-21 nightly release (c84c86f018aea9aa9065ba3d57be1fc9fec51f5a) 2023-10-21 07:33:47 +00:00
6ffe31abca 2023-10-20 nightly release (e24fdfa1774f2bb008865fe991458b51e2bfba1d) 2023-10-20 07:34:06 +00:00
707b7b8613 2023-10-19 nightly release (e3463fe4ca5c43542e0d01814d2a1e13fa7769b3) 2023-10-19 07:34:03 +00:00
8aa40192b2 2023-10-18 nightly release (543a763cd8b433fc5740ce2b9db15b98e83ed9c2) 2023-10-18 07:33:58 +00:00
d902c353e9 2023-10-17 nightly release (013b51f8cc6f973d85c5adb36d5c110b7d42c7a2) 2023-10-17 07:34:05 +00:00
6c94bfc7da 2023-10-16 nightly release (4a388e70f2c7fedcdef598fb2144cc593452f04d) 2023-10-16 07:34:06 +00:00
abcc3fadc8 2023-10-15 nightly release (48989bc820d4dfbc0d23ce6f4a3e3e7495f5b315) 2023-10-15 07:33:47 +00:00
2438ac174d 2023-10-14 nightly release (625a3b1a428a8fa844090a87161b346b6d59075c) 2023-10-14 07:34:06 +00:00
85ee879b6b 2023-10-13 nightly release (a2c17a2b00f7c41866bbde28d33b8c50e5632e01) 2023-10-13 07:33:55 +00:00
996af0de18 2023-10-12 nightly release (9316c8b4bc5e449705e945a3069f35b690d70140) 2023-10-12 07:34:02 +00:00
e96ca539bc 2023-10-11 nightly release (02f6a8126e6453d1f5fba585fa7d552f0018263b) 2023-10-11 07:34:08 +00:00
c5b255161e 2023-10-10 nightly release (468a73f0e3527c52495c864c7d48dc26684f6c0b) 2023-10-10 07:33:56 +00:00
97287ff394 2023-10-09 nightly release (92fea5ae3f5307b8ebf5e9cb123d971237f8bf03) 2023-10-09 07:33:57 +00:00
f90c2af43e 2023-10-08 nightly release (459cef86492cfcfb1ed4136c5d4e5002c1c647a3) 2023-10-08 07:33:47 +00:00
8fe3c302f2 2023-10-07 nightly release (90bf6e3938050fded2dfc49bae9e3207c7f00e04) 2023-10-07 07:34:05 +00:00
20217d1426 2023-10-06 nightly release (11047be10e0767721f3fcb8f3a4b412d1c4834a8) 2023-10-06 07:33:40 +00:00
439cba9277 2023-10-05 nightly release (62cad5b5b0f921e3822ec25b6d7ea4343f4cab21) 2023-10-05 07:34:00 +00:00
56af607c04 2023-10-04 nightly release (510ec7e3c539dfed49df587d09e8a0a87e187201) 2023-10-04 07:34:00 +00:00
4e30fa8231 2023-10-03 nightly release (f7909cb947727195a9c55adaea2f374d0374d84b) 2023-10-03 07:33:58 +00:00
4dae8b4963 2023-10-02 nightly release (06464a347754ef764c90948a74428c0bb8daf9dd) 2023-10-02 07:34:03 +00:00
8d78e808f9 2023-10-01 nightly release (2bcae75513e56bcc12494636b33f31ebd9c2548e) 2023-10-01 07:34:04 +00:00
e6fa795672 2023-09-30 nightly release (6b84658433861a68459adb67b3468a9eb064f0e3) 2023-09-30 07:34:12 +00:00
496a17caec 2023-09-29 nightly release (3dc479e70b3d41dd5b09e9cfef8f5a993c05626e) 2023-09-29 07:34:22 +00:00
fecde478ac 2023-09-28 nightly release (75462fd87024bfe9fb79f4c46ab689bec227caa7) 2023-09-28 07:33:53 +00:00
d7520d8668 2023-09-27 nightly release (869226bf947bad0c4ba740c7b56ffe38acdddf52) 2023-09-27 07:34:04 +00:00
ab61acc20c 2023-09-26 nightly release (ac60638c6c74149f05c7980f571afa6fe0661b0c) 2023-09-26 07:33:56 +00:00
cc697537b0 2023-09-25 nightly release (12cd776d902dea1ee3f0ef7980bea62ff64096d2) 2023-09-25 07:33:54 +00:00
24d07d104b 2023-09-24 nightly release (dee100945eec2285943433e63b0670140eed0d1f) 2023-09-24 07:33:49 +00:00
5d97502dc8 2023-09-23 nightly release (691f8ca4f4108d1c22403d9ef6688e04077d28b4) 2023-09-23 07:33:50 +00:00
90c406a3a1 2023-09-22 nightly release (8856c1628e8ddb3fe3bd51ed8cfef5c02ad0a609) 2023-09-22 07:34:30 +00:00
d8fd7f96b9 2023-09-21 nightly release (55685d57c004f250118fcccc4e99ae883e037e2d) 2023-09-21 07:34:27 +00:00
00ae5fa311 2023-09-20 nightly release (1b3e5b53f336e782d6706eb6e61ec363f2fb2c30) 2023-09-20 07:34:02 +00:00
8fd241588a 2023-09-19 nightly release (54c28c564f0f4da174358312a026398ebca5337c) 2023-09-19 16:20:32 +00:00
4d1773d0c5 2023-09-19 nightly release (98208e51603a27e4b88d933a7c7edc18aada4b9e) 2023-09-19 07:34:08 +00:00
ba087c0903 2023-09-18 nightly release (9e86a093e4c165e093cba38f88ceda97f5b5e3af) 2023-09-18 07:34:06 +00:00
0de25554e9 2023-09-17 nightly release (d860313903f21c26d10c2bdd3a9482466255b001) 2023-09-17 07:33:46 +00:00
78c95e1741 2023-09-16 nightly release (b904432e82933ebbd53fd29910df532f15838176) 2023-09-16 07:33:56 +00:00
4a4af8fb1c 2023-09-15 nightly release (bf5622e9650c8dab77ccabc2fffe600bbdfc0b5b) 2023-09-15 07:33:48 +00:00
5c903fb12c 2023-09-14 nightly release (e066056414aef696265e14bceb54392d4965da99) 2023-09-14 07:34:04 +00:00
2baa4c4928 2023-09-13 nightly release (e066056414aef696265e14bceb54392d4965da99) 2023-09-13 07:34:07 +00:00
7b14f0ff8f 2023-09-12 nightly release (8025b193a966a6d8e3afc9c03a54e577bc04eb3d) 2023-09-12 07:34:01 +00:00
8ef4b6af4e 2023-09-11 nightly release (8ff00360a4daab7848307a9a0b1c81b1da873d0c) 2023-09-11 07:33:55 +00:00
3316374d14 2023-09-10 nightly release (8ff00360a4daab7848307a9a0b1c81b1da873d0c) 2023-09-10 07:33:53 +00:00
11d2c766f1 2023-09-09 nightly release (d4230e55748c66c72e7a17b1cd08540b742b20a5) 2023-09-09 07:34:00 +00:00
806d1a871d 2023-09-08 nightly release (4965fffedad8f7f4d333a384d3b046b029395de5) 2023-09-08 07:33:56 +00:00
0200b1106c 2023-09-07 nightly release (35974234c4f811e28a4a74852d2245ec4d044c2f) 2023-09-07 07:33:56 +00:00
a618118ba9 2023-09-06 nightly release (ce4967ad180bcdc1e89d1c0db4bf63dac8a44b6a) 2023-09-06 07:33:58 +00:00
9ea5b31c6f 2023-09-05 nightly release (51c2e22e94caf56ac89967093b274ac81c7ca9e2) 2023-09-05 07:34:17 +00:00
cf9eb00513 2023-09-04 nightly release (621463a3e6b488b2bff04e355a1abd9a4c5bb2cd) 2023-09-04 07:34:04 +00:00
ea7e229735 2023-09-03 nightly release (e5548f81956157e1fdb76cb52579bffe5915dee1) 2023-09-03 07:33:55 +00:00
753f5a11bf 2023-09-02 nightly release (4084d039b7a4b6e55ee6ca05f2789d7ed03b9d73) 2023-09-02 07:33:56 +00:00
73c794dadc 2023-09-01 nightly release (4a9c6f1b731e86b1d4fe31c6753ce7efdc522efa) 2023-09-01 07:33:43 +00:00
b5b99fe13b 2023-08-31 nightly release (0e4752bafc24755893bd1657c42d8937c275ce74) 2023-08-31 07:34:12 +00:00
b3874abf04 2023-08-30 nightly release (620d267ef30b199e0ba1b37407dcf8670448fb6a) 2023-08-30 07:34:16 +00:00
8c68466304 2023-08-29 nightly release (e68b3ad14f9598334a97c1aff15408e1d8f09002) 2023-08-29 07:34:27 +00:00
2c3aa09cde 2023-08-28 nightly release (138e2895d08a6517c5718b2a0118c1b23ff4664c) 2023-08-28 07:34:02 +00:00
f54acf0991 2023-08-27 nightly release (bad3f2db40ffa491181fc7df5dc310830326d9ec) 2023-08-27 07:33:51 +00:00
c0a66455c0 2023-08-26 nightly release (95cacb7fa97881b7e830d1ee0bcb4729b50632c0) 2023-08-26 07:33:53 +00:00
91bde862b2 2023-08-25 nightly release (e00bd8312440ad121cba8acc12935de4f1d947fc) 2023-08-25 07:34:11 +00:00
b48a2494be 2023-08-24 nightly release (2fcda650cf29301e86dd21a5ceabc0f31fa4e3d3) 2023-08-24 07:33:58 +00:00
62d3bf32fd 2023-08-23 nightly release (83517c8dba012b6b68eafe8f6296db97fe26fa3e) 2023-08-23 07:34:14 +00:00
1d3bf3da21 2023-08-22 nightly release (8ed169b1628285924e10fc98de53dbb75c92c43e) 2023-08-22 07:33:57 +00:00
a65b4aa3e4 2023-08-21 nightly release (11602ac564c0e3178b38a65e09be13644322d303) 2023-08-21 07:33:59 +00:00
4ce227bfb9 2023-08-20 nightly release (11602ac564c0e3178b38a65e09be13644322d303) 2023-08-20 07:33:50 +00:00
668af07501 2023-08-19 nightly release (3ddf30505f8f03bd98e6ff418d7389ba69d8043b) 2023-08-19 07:34:05 +00:00
bac60d8896 2023-08-18 nightly release (3c3874d623b2d2ddf7c666f099ae8d014c7dcca1) 2023-08-18 07:34:16 +00:00
3522f2a7b7 2023-08-17 nightly release (e9af315e02d42ca2075972212e1c5e644620901c) 2023-08-17 07:34:14 +00:00
3af011b858 2023-08-16 nightly release (6c0bba3dafde888777a07e395a6e731c1fa51c26) 2023-08-16 07:34:04 +00:00
e4d5143f8c 2023-08-15 nightly release (2624da638d989c902ef9e1a5cff6028ab816605c) 2023-08-15 07:33:49 +00:00
53551b5c87 2023-08-14 nightly release (2b1058c54273a73ed407f2a6495063e9ef18b54f) 2023-08-14 07:33:54 +00:00
3748ee4a8c 2023-08-13 nightly release (ddd2f682b974fa274771965266c2bc0786f1e747) 2023-08-13 07:34:05 +00:00
c9397a7bc8 2023-08-12 nightly release (5b04e9b6cea2be600e81d111a5e407296522ec8e) 2023-08-12 07:34:02 +00:00
422297f87f 2023-08-11 nightly release (4df84c3b4d93ea53c9ca169f32a0dc4619bee797) 2023-08-11 07:33:53 +00:00
a099ff75d3 2023-08-10 nightly release (dfd441a12cede6702d1fd160eb808778b62adae0) 2023-08-10 07:33:59 +00:00
abe3687720 2023-08-09 nightly release (6691413145b71f20221ae77cb2c5d98b2f3213eb) 2023-08-09 07:33:59 +00:00
c01a41cdec 2023-08-08 nightly release (5f551133dc6dc670705281eb3dcbb6ac0dba9a4b) 2023-08-08 07:33:38 +00:00
5e438d59cd 2023-08-07 nightly release (e35cb480f4df1cf440b8705c93546c1b15891a4b) 2023-08-07 07:33:37 +00:00
a8638d61c8 2023-08-06 nightly release (68cb854d73458a14684d584c25c22b17eb79dfca) 2023-08-06 07:34:09 +00:00
be75ecac1b 2023-08-05 nightly release (dc22b4fdb1b4af1f0b7b77a78b30237d337d7109) 2023-08-05 07:33:43 +00:00
4734e4d60f 2023-08-04 nightly release (aaa989c24451259aff4f0b70070bd7ab8380944d) 2023-08-04 07:33:55 +00:00
f89c73be3a 2023-08-03 nightly release (a6f7dd4707ac116c0f5fb5f44f42429f38d23ab4) 2023-08-03 07:33:39 +00:00
c89b169177 2023-08-02 nightly release (92cac6bf32afabed6de8551196fea1f81a27f65d) 2023-08-02 07:33:54 +00:00
1d1670186a 2023-08-01 nightly release (ceea08a986805030014fac203d5b2411e23ff091) 2023-08-01 07:33:46 +00:00
6298ac688f 2023-07-31 nightly release (3c70d4bda785106b7d55738befa36ff46c596068) 2023-07-31 07:33:55 +00:00
0ff243ff35 2023-07-30 nightly release (858ca65c8a3a8b730426690fcbdfb28e9626eda8) 2023-07-30 07:33:53 +00:00
b638df0afb 2023-07-29 nightly release (f160a972aa7ef2db508dbd233246f6dc61643d88) 2023-07-29 07:33:47 +00:00
eb5cb724fe 2023-07-28 nightly release (e31855d0d6cefe18ca4ee9f74b24d3ed5fbfcffb) 2023-07-28 07:34:14 +00:00
8a24a912a5 2023-07-27 nightly release (707aadeedd081714060e51cee279abaea6305a98) 2023-07-27 07:34:08 +00:00
964a13b3df 2023-07-26 nightly release (8d9c8897eddf53b01ce760ff9ae9ec9e4c2c08b5) 2023-07-26 07:33:43 +00:00
153afbda4b 2023-07-25 nightly release (98956c5320534cb66fd0dd69bc632122e16adba9) 2023-07-25 07:33:59 +00:00
ba1da8199b 2023-07-24 nightly release (54a673bdcf52a36603fcc56dceeef9f5afbd9e62) 2023-07-24 07:33:45 +00:00
a060bf3cf0 2023-07-23 nightly release (45e4706aff4cb92addda329576fcf9e4dc2e772c) 2023-07-23 07:33:38 +00:00
b5222f140d 2023-07-22 nightly release (0ad93a3d5684c2026bda8ff4ab7c72c6596a225b) 2023-07-22 07:33:35 +00:00
f228c8b8ca 2023-07-21 nightly release (a01a7329548e579ba894ac016670296cf96a6802) 2023-07-21 07:33:56 +00:00
a16c87a767 2023-07-20 nightly release (2e81cdc1ddbb07dead5b32a6fbf1514a118ac208) 2023-07-20 07:33:38 +00:00
82e03ad957 2023-07-19 nightly release (9c225c9b9a07eedaede9c27c74188181184fb022) 2023-07-19 07:33:53 +00:00
5e128c4fa1 2023-07-18 nightly release (ad6dad810e871da3ec9d19f1ec0f11a7b62fca03) 2023-07-18 07:33:49 +00:00
6ebf7bd6ab 2023-07-18 nightly release (ba00b0939ea82ec7607dfa8c0b4bd468f4f3bf31) 2023-07-18 00:03:13 -07:00
c437a4b1e0 2023-07-17 nightly release (93f852f201b93ca0c41b5cd861834d4f1f235ef7) 2023-07-17 00:03:38 -07:00
c69b6e5da6 2023-07-16 nightly release (e3c4f2fb59837808756f76c23f1fb29afa40ce9d) 2023-07-16 00:03:36 -07:00
6db8e8b9b7 2023-07-15 nightly release (028536646499abbb9cfce672a3ff7a91f1511e2c) 2023-07-15 00:03:16 -07:00
d257917ad4 2023-07-14 nightly release (a66f08d6265c9800ca00ee5f0a8a626f89e0374b) 2023-07-14 00:03:58 -07:00
fccac344df 2023-07-13 nightly release (0faf8ed49f9da63d1b5339f152586ffd7cde3e83) 2023-07-13 00:03:17 -07:00
e571f87e89 2023-07-12 nightly release (979f826015cbd2b353f02e93865a9b9a8877b414) 2023-07-12 08:10:34 -07:00
927dc66238 2023-07-11 nightly release (63d1fb21f56d9b91dd4080b04276d4cc461224fa) 2023-07-11 00:03:32 -07:00
69565763c8 2023-07-10 nightly release (54f33265dba0d0782febe8bf8029efb2d0b29ac1) 2023-07-10 00:03:45 -07:00
9b5a84f544 2023-07-09 nightly release (dd6c38cb596199d70bba76688c99ed919d77002d) 2023-07-09 00:03:30 -07:00
3a919e00b8 2023-07-08 nightly release (dbc22168008711f68775e9d5b159295527a9f108) 2023-07-08 00:03:20 -07:00
760dafbb05 2023-07-07 nightly release (a358a9262e4b8c7280d1817ac73f24a1bf383da6) 2023-07-07 00:03:28 -07:00
72404877d3 2023-07-06 nightly release (13763f58ad86fadf49ef7960d1836318e6480d36) 2023-07-06 00:03:23 -07:00
758c84d41f 2023-07-05 nightly release (ea4d5c45385233af6be36aa50a6945ee9c704b74) 2023-07-05 00:03:30 -07:00
e5472fd3c3 2023-07-04 nightly release (2a21469a77a53a066a299225a3d4ddd60693e7a6) 2023-07-04 00:03:21 -07:00
7682252cac 2023-07-03 nightly release (9f7ad25c987b8b69ff3a0c2aabd55a7c032de7d5) 2023-07-03 00:03:20 -07:00
d33f3a7547 2023-07-02 nightly release (707d265db2dc1eeb8a1b95bc70def5a1e5af7752) 2023-07-02 00:03:23 -07:00
bb3df0bb7c 2023-07-01 nightly release (1de1bea60d96c75403beaa13b1ce80d0ea6ccdbd) 2023-07-01 00:03:41 -07:00
dc72046b23 2023-06-30 nightly release (89decc3a10137838cf4f7400af034645d52684e0) 2023-06-30 00:03:23 -07:00
7bc858743e 2023-06-29 nightly release (998c07799f22a0bb63a39c84d15eae29778631f4) 2023-06-29 00:03:27 -07:00
94ca800459 2023-06-28 nightly release (28d42e66e418f655fdca45c53d4e09d095e7e182) 2023-06-28 00:03:30 -07:00
43ec335ff2 2023-06-27 nightly release (968b7b5e0f892f993c65f8a365fb2df99359f09c) 2023-06-27 00:03:56 -07:00
176a02ed90 2023-06-26 nightly release (58feefa4ede4779d339d76b78b0e0a3de7d47212) 2023-06-26 00:03:58 -07:00
3bebfdfbab 2023-06-25 nightly release (86e0eda18d3665b9db6ea075cc3586210f4116c3) 2023-06-25 00:03:35 -07:00
27b3861096 2023-06-24 nightly release (63f66d19ea8973563edbb48c8887d2e6b4931408) 2023-06-24 00:03:56 -07:00
ad724c83fb 2023-06-23 nightly release (5a97c947c69dd248fb60eba6c8d1913720dfe2af) 2023-06-23 00:04:57 -07:00
d46b579c85 2023-06-22 nightly release (58d11159bd9bdb4db70901821bcae4ce13160284) 2023-06-22 00:03:25 -07:00
e4cf441a4b 2023-06-21 nightly release (f3c3d12efb363f61ca9d68ed9e55f345f1a7e388) 2023-06-21 00:05:18 -07:00
e928f55884 2023-06-20 nightly release (d4b85f3031b81ea9f0e53502ad3b831fb91ec39b) 2023-06-20 09:45:27 -07:00
2aa882f2ad 2023-06-20 nightly release (2d745b95d723641e575027bd4e2fff612f61cc8f) 2023-06-20 00:03:38 -07:00
5beeb400ca 2023-06-19 nightly release (2d745b95d723641e575027bd4e2fff612f61cc8f) 2023-06-19 00:03:41 -07:00
59c654a6ad 2023-06-18 nightly release (2d745b95d723641e575027bd4e2fff612f61cc8f) 2023-06-18 00:03:48 -07:00
a522f9aedd 2023-06-17 nightly release (2d745b95d723641e575027bd4e2fff612f61cc8f) 2023-06-17 00:04:09 -07:00
1b5af9c097 2023-06-16 nightly release (2d745b95d723641e575027bd4e2fff612f61cc8f) 2023-06-16 00:03:22 -07:00
0d4f9aee90 2023-06-15 nightly release (2d745b95d723641e575027bd4e2fff612f61cc8f) 2023-06-15 00:03:37 -07:00
487d6c278b 2023-06-14 nightly release (876161983d353f7553755a58b927762f09c86966) 2023-06-14 00:03:21 -07:00
246b53d588 2023-06-13 nightly release (45104cb67f5e0591bc246edb71f6771f95317cb7) 2023-06-13 00:04:05 -07:00
8aee9489c9 2023-06-12 nightly release (8a744c31d3c5194b4850869a112d94912c1e08b4) 2023-06-12 00:03:26 -07:00
ec23ae5ad4 2023-06-11 nightly release (03101a227f6639d5a9ad628d1dc300f9f99a8812) 2023-06-11 00:03:16 -07:00
dd69d6251a 2023-06-10 nightly release (0496d70aa076143aabde9ee0a010dcccb882eaa9) 2023-06-10 00:03:43 -07:00
b6ab779111 2023-06-09 nightly release (4cc474dec4c55fc11ee92e7c9366946744e60cdd) 2023-06-09 00:03:55 -07:00
c1406a99df 2023-06-08 nightly release (d5142c52d3592e4a5777834be80b276ed1073012) 2023-06-08 00:03:58 -07:00
6226b7d098 2023-06-07 nightly release (428bff842dbeb4bbef245b7c4355f8c76a4bd1b3) 2023-06-07 00:04:54 -07:00
4d648e450b 2023-06-06 nightly release (872fdb329b75c832c2cd1314b3613e28289d8fac) 2023-06-06 00:09:17 -07:00
7a5da81822 2023-06-05 nightly release (881307abcfd8fd229fa521098597f4becb2d6c86) 2023-06-05 00:03:25 -07:00
810edae513 2023-06-04 nightly release (992bffe5a3f030ac75152fcda4bdc6c8d7539962) 2023-06-04 00:05:41 -07:00
7726721661 2023-06-03 nightly release (39b04370db85313efe23966d04499970a882415c) 2023-06-03 00:03:32 -07:00
52c7a761c5 2023-06-02 nightly release (1ca2e993af6fa6934fca35da6970308ce227ddc7) 2023-06-02 00:03:32 -07:00
a14be7981b 2023-06-01 nightly release (bebb8b7c1e6f3baf35c00d2a9265a285b4e439a8) 2023-06-01 00:03:57 -07:00
4855233864 2023-05-31 nightly release (e7cc41772d65010749beab7734d29e32d1a7b494) 2023-05-31 00:03:46 -07:00
9148727f45 2023-05-30 nightly release (0e72ada9bba2693e55fd7177ab4d2af826d7d15f) 2023-05-30 00:03:31 -07:00
4316d8495a 2023-05-29 nightly release (af1d437654aa7fd5aa4a5d30ae2f2e9af34c7765) 2023-05-29 00:03:35 -07:00
3dedcb31fc 2023-05-28 nightly release (c3ea8cc58beec343fef8ec69c409e0b83e60a8e9) 2023-05-28 00:03:43 -07:00
c914be2d44 2023-05-27 nightly release (26f53bb8b0dfeb633fef405595bd4f30ffbf437b) 2023-05-27 00:03:26 -07:00
10b46f7c7f 2023-05-26 nightly release (fb468b6792213e0d8e6221b3bb51e71fcadbed30) 2023-05-26 00:03:28 -07:00
eb2ef134b4 2023-05-25 nightly release (3ca068bc44440e780298d250f295d2b68a8a9ae3) 2023-05-25 00:03:47 -07:00
41917f7d92 2023-05-24 nightly release (45843c7f41374d6d0f76050febc72582a650b472) 2023-05-24 00:04:57 -07:00
981d4c2578 2023-05-23 nightly release (8cab7994a6e92b9b37e324be493c04e4660dc330) 2023-05-23 00:03:25 -07:00
871fc7bb76 2023-05-22 nightly release (e9a7115605ee5b6ae38ea5716a4abea5aa415333) 2023-05-22 00:03:31 -07:00
1c938577d8 2023-05-21 nightly release (9e8da7fb44d190677d0e455bd0a85053311c27e2) 2023-05-21 00:03:38 -07:00
aa43e0d245 2023-05-20 nightly release (0c6f409cdad073e8c64ef11b72c41d89ab7ee167) 2023-05-20 00:03:16 -07:00
61239df555 2023-05-19 nightly release (f66d5dd788b4f9f09790c705afe64f377624c621) 2023-05-19 00:03:32 -07:00
329bb2a33e 2023-05-18 nightly release (900ca4df595fbd7370534cec9d4a0f781667dfd0) 2023-05-18 00:03:41 -07:00
174d01bc93 2023-05-17 nightly release (41d668c9dcad40589609efab899131f0acd188f1) 2023-05-17 00:03:24 -07:00
ed0910596d 2023-05-16 nightly release (799ef7e501003ef988e23a724789c0dbbd304ebb) 2023-05-16 00:03:32 -07:00
f3e13d9567 2023-05-15 nightly release (7dd8e08817ee59c926922409062e25f30408469b) 2023-05-15 00:03:40 -07:00
7a20f535c0 2023-05-14 nightly release (674e52b0b913d7b7f733ce1e73a42cb383860d55) 2023-05-14 00:03:16 -07:00
d6e2903b44 2023-05-13 nightly release (387b369ee4d0d94b616254e26e43ab16a30d83f4) 2023-05-13 00:03:15 -07:00
1a3d3669ef 2023-05-12 nightly release (e98d762f21083775598e7c1d96b2b9f6b10ea9d4) 2023-05-12 00:03:39 -07:00
6166b3dd77 2023-05-11 nightly release (da02ccc60e225c3150e695ddf724799dac961806) 2023-05-11 00:03:31 -07:00
b204f65758 2023-05-10 nightly release (b004c0b3c6a1ee39ba0b512a00d95e7f83852556) 2023-05-10 00:03:45 -07:00
6cbfabc45f 2023-05-09 nightly release (75e4214f92059aa3ab2931e31ace63f598ebec25) 2023-05-09 00:05:18 -07:00
9a8c655956 2023-05-08 nightly release (7d0e4e2aa843ef5d73646f5b304914d2b65db93c) 2023-05-08 00:03:36 -07:00
3a5c9bb5c8 2023-05-07 nightly release (ca9f55f79d944672cb93157836f8ee92f54d2e10) 2023-05-07 00:03:25 -07:00
cf05864b32 2023-05-06 nightly release (f73973d789a14afe9fde4640c767f2da0b547076) 2023-05-06 00:14:18 -07:00
ccace360e0 2023-05-05 nightly release (bf52d570d9be0167501cd6a206b7e262e6d3860c) 2023-05-05 00:03:25 -07:00
1b6d6e98fe 2023-05-04 nightly release (9bc68fcd25798553e311d8562fe3a1c5a450feb3) 2023-05-04 00:03:34 -07:00
664ac1ebd2 2023-05-03 nightly release (771a9debbe8cc1bbc41e1f9b8759971ae20f09d2) 2023-05-03 00:03:41 -07:00
d97f26abcb 2023-05-02 nightly release (ff297223641ae336c421994a9d650bbb72798c0c) 2023-05-02 00:03:51 -07:00
f38a04a5c5 2023-05-01 nightly release (e779a30d5097714acea011da6a554e43810b5d0e) 2023-05-01 00:03:22 -07:00
572ef38a2f 2023-04-30 nightly release (6c934a89a725fd5d171b52a37cbc58e198edf4d6) 2023-04-30 00:03:28 -07:00
6d1cfc04be 2023-04-29 nightly release (3d55bce3bf5fe5cb0d654ecb48100eeac1d2dfa0) 2023-04-29 00:03:24 -07:00
4fecad50d3 2023-04-28 nightly release (54c0edf6da01c5ded74fe67e834b3c6869d5e5f0) 2023-04-28 00:03:30 -07:00
521b386e57 2023-04-27 nightly release (e0bf51d3bf2cf4c8272708f78b72572d81efd0a2) 2023-04-27 00:03:25 -07:00
a977a12ca5 2023-04-26 nightly release (0b1b063158e35be06e488e29a302ef28932b083b) 2023-04-26 00:04:52 -07:00
015e4ddb72 2023-04-25 nightly release (ecd2c71871f8bf9a9fa4a4d875609b0922061a6f) 2023-04-25 00:03:17 -07:00
287a30305a 2023-04-24 nightly release (48d112c431fc988de5473d3992f6c72ec5f02a6b) 2023-04-24 00:03:20 -07:00
1b3ac4a14b 2023-04-23 nightly release (ebd47b0eec9af20b238a72860dfc08c0bf250fdb) 2023-04-23 00:05:36 -07:00
f85810f9fc 2023-04-22 nightly release (ca8625f45662e5d451d1da928b223e47d79ce5fa) 2023-04-22 00:03:25 -07:00
8aa785fa2b 2023-04-21 nightly release (51742a467d1714afb6715af92c4459160eb71fb4) 2023-04-21 00:03:24 -07:00
aef7b6b424 2023-04-20 nightly release (95ca8e589d8aaa614619e4dcd89c569cf0ad8dea) 2023-04-20 00:03:30 -07:00
b206fd2f6a 2023-04-19 nightly release (8e69879209330089fdd8f2cd48769dc467c7d036) 2023-04-19 00:03:07 -07:00
ffa6fefab6 2023-04-18 nightly release (436edc5ac3de4c4e677ed136473bafe72002cc93) 2023-04-18 00:03:24 -07:00
584d16f2f2 2023-04-17 nightly release (efc3887ea508b3cfd94603fd8afe4e8cf6dce7b7) 2023-04-17 00:03:23 -07:00
fea73cb39e 2023-04-16 nightly release (039faf0dbf75c8e6bb3c097c1b8d257eebb74c45) 2023-04-16 00:03:27 -07:00
ba53e304d4 2023-04-15 nightly release (e9be0b0fb93c271ba3deb31b5ccc8f4c4f602ef9) 2023-04-15 00:03:09 -07:00
30a9757c80 2023-04-14 nightly release (cd078d376e833c4da3b8a6b03447614501178e9b) 2023-04-14 00:03:21 -07:00
3e5bc3f6bb 2023-04-13 nightly release (ccc9a3d726b1f249daffa442460b70fb50a0ee2e) 2023-04-13 00:03:28 -07:00
84ec5d9b1f 2023-04-12 nightly release (6ff32b5575abf07ebee74c9e17fc5113d5dd0652) 2023-04-12 00:03:28 -07:00
61007ca987 2023-04-11 nightly release (d4ce045cfcf2d7cacd0b90d7ea625f6fa82d566b) 2023-04-11 00:03:18 -07:00
b8423c1370 2023-04-10 nightly release (96fb64a1599f3e1679baa9db16f71c8a854577c7) 2023-04-10 00:03:15 -07:00
e60d9b7507 2023-04-09 nightly release (54b168484d8463a6f38737751ab5ac78777fa6a6) 2023-04-09 00:03:15 -07:00
f3a098a55a 2023-04-08 nightly release (d255c8e1ad332a2ded0ff1595b63d50034c473ee) 2023-04-08 00:03:14 -07:00
83b1c515f4 2023-04-07 nightly release (616f50da3aa8c0616cf93f1d92f31044aa5123f8) 2023-04-07 00:03:22 -07:00
a86aae1504 2023-04-06 nightly release (9c7b03d51e097c40abdb9ef55ce79b98d7f1fd48) 2023-04-06 00:03:13 -07:00
73b995ec2d 2023-04-05 nightly release (2d9b2bcfba7eaf9aa30e05df4ff51fa9f3241e9e) 2023-04-05 00:04:29 -07:00
5fa1402662 2023-04-04 nightly release (c5963b779281e2b7dfba1b2fc0e6628c1622f7e4) 2023-04-04 00:03:17 -07:00
3b8052d979 2023-04-03 nightly release (5d62d1255778b53ece16c79fd842cd42eca31f93) 2023-04-03 00:03:10 -07:00
5775e1c1e6 2023-04-02 nightly release (7fcff01b50d6a3a94a181a5b452455e8a82efad3) 2023-04-02 00:03:04 -07:00
63f61a66b0 2023-04-01 nightly release (57c6f3fe90bccfed878b301c2d61bf42270c5b52) 2023-04-01 00:03:04 -07:00
f08dd7d0f7 2023-03-31 nightly release (3dc440527817b065fa5cb652467921c9514cdd1c) 2023-03-31 00:03:21 -07:00
f6aae5043e 2023-03-30 nightly release (af0264ae084359c8042194f5e15070aafb430677) 2023-03-30 00:03:19 -07:00
f1f0a4f064 2023-03-29 nightly release (91166ef7e75fc5fb94fb56ec6679a1744cf0f28c) 2023-03-29 00:03:21 -07:00
81558034b3 2023-03-28 nightly release (c1a6dde79e5517ba3f71dcd4b9ff8f00e0cd881d) 2023-03-28 00:03:26 -07:00
cb970ea2e5 2023-03-27 nightly release (542fb0b1fad6bf61929df16e2133e9a296820f08) 2023-03-27 00:03:29 -07:00
c9ed10c377 2023-03-26 nightly release (dc45ad702466e4a73d972d3e1dc0c12ed80d2eef) 2023-03-26 00:03:18 -07:00
13581257b1 2023-03-25 nightly release (2e8086b0a157dbe922a588fed15edc31c8faf802) 2023-03-25 00:03:16 -07:00
333a93ff4d 2023-03-24 nightly release (763c5a33e70113253e5861ee6d185ea343acf286) 2023-03-24 00:03:19 -07:00
d3b915155a 2023-03-23 nightly release (c5b65032aca9af04ce9d261bc29ff7f063b00748) 2023-03-23 00:03:18 -07:00
00891e96e8 2023-03-22 nightly release (f9a9a888128625969be1ff6effca9df74a55432c) 2023-03-22 00:03:31 -07:00
8c8491fe4c 2023-03-21 nightly release (d2f57229969d1758369a15a71740a94f5baa2cd6) 2023-03-21 00:03:32 -07:00
4d3baf684b 2023-03-20 nightly release (5471621497ea0068bd453d251bf5ec2621e8119f) 2023-03-20 00:03:23 -07:00
422a1d3b9a 2023-03-19 nightly release (c1214ce5c26fce541a920bdf9917c9ca9f63ecb0) 2023-03-19 00:03:16 -07:00
21b32e8b01 2023-03-18 nightly release (b132220309add1748f4548b88d7a947c19322ff9) 2023-03-18 00:03:06 -07:00
fb9db7dccf 2023-03-17 nightly release (cbd3df93c4a4b5d8a3244be9fbfa68e2b06bb836) 2023-03-17 00:03:19 -07:00
e262e404bd 2023-03-16 nightly release (3606f593665f2dc3ad0877db4f5df69a6c31cf2f) 2023-03-16 00:03:27 -07:00
4f7cbd0a3a 2023-03-15 nightly release (c6a82e433924b4d36fd571d36ce363cb1c622c76) 2023-03-15 00:03:27 -07:00
b5e03345ec 2023-03-14 nightly release (2cc8368af369c6c3d930ca9ad85c7124e4809509) 2023-03-14 00:03:11 -07:00
eb17fc7b11 2023-03-13 nightly release (be220690d9d8b7e8d58b81694c6cbf0958f94845) 2023-03-13 00:03:17 -07:00
1238ae3e39 2023-03-12 nightly release (82d3d053b9cf439a1970d788a7809cdc30142626) 2023-03-12 00:03:14 -08:00
cf353b03fd 2023-03-11 nightly release (f3fc4d035d8fc1ed560ade0c8b8eff6be1c95972) 2023-03-11 00:03:08 -08:00
c54ce93106 2023-03-10 nightly release (065de4301200117c65607b213322cbde800b50f8) 2023-03-10 00:03:08 -08:00
820eb13ecf 2023-03-09 nightly release (fe05266fda4f908130dea7cbac37e9264c0429a2) 2023-03-09 00:03:09 -08:00
47cb44946c 2023-03-08 nightly release (3a4275278b265f02e9d5fa336074d27b908dd1ea) 2023-03-08 00:03:24 -08:00
24e305c3c6 2023-03-07 nightly release (7038458c5b47eda626e9d535f792f1f4611c5675) 2023-03-07 00:03:12 -08:00
d7e5a324d5 2023-03-06 nightly release (02792ff16f4934ccd837c28df437de460273432c) 2023-03-06 00:03:05 -08:00
98e5a7eaf1 2023-03-05 nightly release (af8dbe7ec2b32492589bad1eef4ed72be81646f9) 2023-03-05 00:03:17 -08:00
f1d60f5872 2023-03-04 nightly release (43dd043ea79cd427aa40f1910da63da96e8363ec) 2023-03-04 00:03:07 -08:00
a9a8c5ea9e 2023-03-03 nightly release (53c9866ffa8baa7b7070901ce74704e0771bb05b) 2023-03-03 00:03:07 -08:00
5bdd05400a 2023-03-02 nightly release (62b775583f008effc510e5f5c3e2b30a85a53465) 2023-03-02 00:03:20 -08:00
f09ed1d3c2 2023-03-01 nightly release (21b1134be690bdf338689ed0da287e772e8fd859) 2023-03-01 00:03:19 -08:00
341911ad3e 2023-02-28 nightly release (58648822b640dc11d196f7060d968b2c38fb3351) 2023-02-28 00:03:13 -08:00
1e2e6e78c6 2023-02-27 nightly release (6624a73837bee5b59f01bdd4d2f0d5619bd3ab1f) 2023-02-27 00:03:21 -08:00
3313e86c03 2023-02-26 nightly release (a88bfc60c75d22a047f03787aaa43130c6a8d6d9) 2023-02-26 00:03:10 -08:00
f64b61f58c 2023-02-25 nightly release (f5cf1a8b434a39576f8f49f4ebb32741de5cb9c1) 2023-02-25 00:03:17 -08:00
dbde255a2c 2023-02-24 nightly release (6665fe9e65548ba01cb232b965af4d64a49fb46b) 2023-02-24 00:03:19 -08:00
2a2c29b984 2023-02-23 nightly release (cece63f1976464f3c48b5563d8a59888566d0897) 2023-02-23 12:55:17 -08:00
5a6cf2804b 2023-02-20 nightly release (286d821e61e29de8fd6c81abd78b84fea5a44c0b) 2023-02-20 00:03:06 -08:00
66f63bd56f 2023-02-19 nightly release (a17a7ccc92144452e0fe51e02f21f1f1ba88118a) 2023-02-19 00:03:06 -08:00
844f9f697e 2023-02-18 nightly release (e44737e61975c49b20fa176e77c012b626c5f331) 2023-02-18 00:08:44 -08:00
fe66daafaf 2023-02-17 nightly release (766d51b496bb85feab4b0f23977b4ffc0a0e354b) 2023-02-17 12:30:53 -08:00
9c72352263 2023-02-17 nightly release (5d1e9fd214fcd8bbc95e9f8dea69df8dd17bf92e) 2023-02-17 00:03:19 -08:00
5fe2e6ac60 2023-02-16 nightly release (07bc6b958768af3462095eea7af2cbc7b395b972) 2023-02-16 00:03:03 -08:00
de1114554c 2023-02-15 nightly release (c10acb834d73f399589ce08a9fea1e4520a8c02c) 2023-02-15 00:03:08 -08:00
e67dc17b06 2023-02-14 nightly release (d567df9f36094e1efa89d11599067773f45032c8) 2023-02-14 00:03:15 -08:00
8397282212 2023-02-13 nightly release (6fadd5e94a273d62ac3f245a7615f818b6a33500) 2023-02-13 00:03:07 -08:00
52df37a117 2023-02-12 nightly release (67d979098567fd61dfdb918d837426535eb9883b) 2023-02-12 00:03:02 -08:00
00a8867aba 2023-02-11 nightly release (948cd61afc90e1b9067b35d4aec4ec74deeb73f6) 2023-02-11 00:03:09 -08:00
54ad459fb0 2023-02-10 nightly release (336d9354d655e52c575d070fc53eaccbebc94cd2) 2023-02-10 00:03:18 -08:00
34d2472fc0 2023-02-09 nightly release (cb715c26e270759e3955968fbd3d763d51b0b83d) 2023-02-09 00:03:07 -08:00
cb47373166 2023-02-08 nightly release (61ecaf1dd40ba93dde11249bd8ee64274098d3d7) 2023-02-08 00:03:11 -08:00
1530b798ce 2023-02-07 nightly release (d493bc8a764f65c140e73d7a6e9d20abe519a9b0) 2023-02-07 00:03:12 -08:00
0a73b08b9f 2023-02-06 nightly release (9350bcf6ae9d646389a0a4345c48275d4f9e4d1a) 2023-02-06 00:03:02 -08:00
6a03ad6a34 2023-02-05 nightly release (1d53123f44e2d5f08e4605af353b7d32b62346ae) 2023-02-05 00:03:01 -08:00
3b05e02983 2023-02-04 nightly release (adde6fd25eec0bd5ebe304ca08a9d57ba64d10d5) 2023-02-04 00:02:59 -08:00
df9abf685b 2023-02-03 nightly release (732a865c1bf2382a75f531d5907d18dd9bb0c4c9) 2023-02-03 00:03:13 -08:00
bf27152a8a 2023-02-02 nightly release (569f2e3228efeb1f8571e96d8f3ec835239e9c0e) 2023-02-02 00:03:05 -08:00
2e6952fa58 2023-02-01 nightly release (79db5bcc9d3febad00e5a2234b44c7db87defdab) 2023-02-01 00:03:07 -08:00
b2690c3cea 2023-01-31 nightly release (2a6e08570442c8fcbad6659e074c226809b1ed22) 2023-01-31 00:03:05 -08:00
3a7bab55b2 2023-01-30 nightly release (e790281a85fe3693fc1d38bf0e2c6e874d5e10b0) 2023-01-30 00:03:07 -08:00
d6c87398e2 2023-01-29 nightly release (7cc91f4002cb5dd3290318a967fb1e830d812666) 2023-01-29 00:03:10 -08:00
5d6a4f697c 2023-01-28 nightly release (ca8f5e177a1e0781b4a9e376c34d3ef9b934a454) 2023-01-28 00:02:59 -08:00
6cbac32dc7 2023-01-27 nightly release (661800a2cf0a4251c48a50fce8599c4c5fc49be1) 2023-01-27 00:03:02 -08:00
8181f12af5 2023-01-26 nightly release (0a57a20c02132577a48f5283ffbdf04257af1dbf) 2023-01-26 00:03:01 -08:00
83e8426fa4 2023-01-25 nightly release (4bc0491752b6d891c5bb33a3d18c5da7334fc6df) 2023-01-25 00:02:59 -08:00
d0e7a12a39 2023-01-24 nightly release (2a8669c54cb29bf42692185340fd68f341ac65ef) 2023-01-24 00:02:57 -08:00
6f611c5030 2023-01-23 nightly release (20bf77f9bd3cd7cb4da758afb58042e2980ee6ba) 2023-01-23 00:03:12 -08:00
284677213e 2023-01-22 nightly release (ed07070a11802dd561995c20e89d33cc986de2fa) 2023-01-22 00:02:58 -08:00
f960e5c8ae 2023-01-21 nightly release (1af40d5108ee5ddd408eed8115ce3abf6512149c) 2023-01-21 00:03:09 -08:00
d7b0db060c 2023-01-20 nightly release (620846c8b4f00f58dbd8b62746ec8e4328552b58) 2023-01-20 00:03:12 -08:00
65d7b967f8 2023-01-19 nightly release (de459bdfaa44a25565d4c4eedda49906feb6d993) 2023-01-19 00:03:04 -08:00
69dbc7dca8 2023-01-18 nightly release (00fe63d1d8c5c6411e093fa4388dc98c3683d236) 2023-01-18 00:03:34 -08:00
fbc4086458 2023-01-17 nightly release (0388400f3f8a8ecae2f809ba40ca3ddd5a8b9028) 2023-01-17 00:03:09 -08:00
a4dd47e06e 2023-01-16 nightly release (85e0fd0280948a342a916429448fed2486e82aa5) 2023-01-16 00:02:55 -08:00
868797ac1f 2023-01-15 nightly release (d41b5d7c145f3e09c7223c2b707933266241ec9b) 2023-01-15 00:03:08 -08:00
c202ddde8d 2023-01-14 nightly release (b8057aa16d376eefe081d852335541e2a7609c40) 2023-01-14 00:02:59 -08:00
06f81fdbf8 2023-01-13 nightly release (fbbb19599a1d162e5927542ed251fd2ba63d5163) 2023-01-13 00:03:32 -08:00
a0c46f5ab8 2023-01-12 nightly release (5766764d6cf9b14380c1b6ef5c2fe5392722e37b) 2023-01-12 00:03:08 -08:00
466be3f2ad 2023-01-11 nightly release (364f526b9cdf9818a7647b5e637efdee825d61a1) 2023-01-11 00:03:02 -08:00
971b1d38ec 2023-01-10 nightly release (b0f359a3c99041d9c87e700dccd0f22e73f40e1d) 2023-01-10 00:03:00 -08:00
65ff52a53b 2023-01-09 nightly release (73e5379fab05c40ff6f42500309cdc17ee57548c) 2023-01-09 00:03:08 -08:00
fac4361e84 2023-01-08 nightly release (73e5379fab05c40ff6f42500309cdc17ee57548c) 2023-01-08 00:02:56 -08:00
253e5c9744 2023-01-07 nightly release (faed4db4971af151e3dba7233ae49f9c0149dc18) 2023-01-07 00:03:12 -08:00
143e944699 2023-01-06 nightly release (33547bb58745a5deedaa36ca4e9363413f803e12) 2023-01-06 00:02:56 -08:00
1b52ceea52 2023-01-05 nightly release (d6bd67f2ebebb840ecf9d1c7c70653c900e6ee96) 2023-01-05 00:03:01 -08:00
caaf948ae3 2023-01-04 nightly release (80394bb73423bb849f46a7a9ffc55ebf478b8639) 2023-01-04 00:02:54 -08:00
74c03b46ae 2023-01-03 nightly release (2b52db9c953d063db7b46c12f4df35b47aca4381) 2023-01-03 00:03:05 -08:00
680efdb840 2023-01-02 nightly release (f6136331244fbe680ccc03d3bd5c5d9a71933d9f) 2023-01-02 00:03:05 -08:00
ede810cc26 2023-01-01 nightly release (f6136331244fbe680ccc03d3bd5c5d9a71933d9f) 2023-01-01 00:02:59 -08:00
733de4dc1d 2022-12-31 nightly release (73436af43fd84891b6765f4d1e5eeb0073a10a11) 2022-12-31 00:02:58 -08:00
a52fb6c041 2022-12-30 nightly release (9710ac653157dee780c33bf3080568076ce69a7b) 2022-12-30 15:03:00 -08:00
f8dcaca1da 2022-12-30 nightly release (6f034dc0b09a96c50421cf92ddb8709c59d95edf) 2022-12-30 00:03:12 -08:00
7edb2ee6a4 2022-12-29 nightly release (3d8834bdbf7f5da1163fd7ac543728779b557d29) 2022-12-29 00:02:58 -08:00
5abc365268 2022-12-28 nightly release (2e79d46708fb53c1a92bf0c2840d47a39e13e400) 2022-12-28 00:03:06 -08:00
4ada512fc4 2022-12-27 nightly release (3f4e87beaf67ec44d609605777d9da9e65cfbdd9) 2022-12-27 00:03:00 -08:00
9c156524e8 2022-12-26 nightly release (3f4e87beaf67ec44d609605777d9da9e65cfbdd9) 2022-12-26 00:02:58 -08:00
fac2cc8b24 2022-12-25 nightly release (5725a440806e7d75ad4fdea2c63d1a0a2fd004af) 2022-12-25 00:03:01 -08:00
3416c23868 2022-12-24 nightly release (5725a440806e7d75ad4fdea2c63d1a0a2fd004af) 2022-12-24 00:03:07 -08:00
7277d5a8f0 2022-12-23 nightly release (b95e1d76a86b7b66f0946f72ebd33889bfc19e03) 2022-12-23 00:03:12 -08:00
7919ae96db 2022-12-22 nightly release (6cea4f3d57927b30c3fc0a2f7103684fde0c75ea) 2022-12-22 00:02:55 -08:00
5837a6683b 2022-12-21 nightly release (5d37890b8eb952560f22b1b8ff661a024bf4b6d9) 2022-12-21 00:03:00 -08:00
8b450f6934 2022-12-20 nightly release (d6dd2e97da619319a103d1061290fe33ce33b6a4) 2022-12-20 00:02:58 -08:00
1ae3ad540d 2022-12-19 nightly release (9ca41a986c902f9db44ff04ca87a07238627d020) 2022-12-19 00:03:10 -08:00
e2c1293373 2022-12-18 nightly release (e2377c8300c578d648aac3bff6f96cdfce4308de) 2022-12-18 00:03:06 -08:00
8726996125 2022-12-17 nightly release (e689c509222a2dbc4796bcf32625193b9ea13c49) 2022-12-17 00:03:08 -08:00
2bd3fbe2e6 2022-12-16 nightly release (4372dbb89f457a0aa90f22d27f1468442a2a51bd) 2022-12-16 00:03:08 -08:00
1902ca9d53 2022-12-15 nightly release (670efb974af056fc3f46b48225581351b047eb4d) 2022-12-15 00:03:10 -08:00
971c769d42 2022-12-14 nightly release (769392178a8da6aab1bf82a09c9b105ea3dd6778) 2022-12-14 00:02:55 -08:00
3612f8bd54 2022-12-13 nightly release (7498e23bd52f049071500d443a18e6e5382c87b1) 2022-12-13 00:03:12 -08:00
008d7ee616 2022-12-12 nightly release (b95ea4f1494dacf86b5789f5466025b1fac1f2bb) 2022-12-12 00:03:01 -08:00
e220581718 2022-12-11 nightly release (6845598617ab75920dc1d4fbba9475bf64148bd6) 2022-12-11 00:02:56 -08:00
6e83e0b5d7 2022-12-10 nightly release (0457020d2ccfcb34262e2d46a3a4dbbdba5772a2) 2022-12-10 00:03:03 -08:00
8ce9be6212 2022-12-09 nightly release (b4c27c86b71c41bb44a804d8c6a7d466fb4d8baf) 2022-12-09 00:03:00 -08:00
9a7b94fa1f 2022-12-08 nightly release (bc93454e4a51f85f6c8603309bed7bd6de84385a) 2022-12-08 00:03:08 -08:00
c8ee46c764 2022-12-07 nightly release (26d1dbc4f85be7c5dc03c865e9bc092d83f7f2f0) 2022-12-07 00:02:57 -08:00
d80585d52d 2022-12-06 nightly release (05ccbd6d94c25fc26d30d2f60081924ce5bc973f) 2022-12-06 00:03:01 -08:00
ba7543762b 2022-12-05 nightly release (4648baa91155718642937ec27b04196fde846566) 2022-12-05 00:03:10 -08:00
b23c765574 2022-12-04 nightly release (9a1c6fd50664a46f0bacac92a10bcc26f307fadb) 2022-12-04 00:02:54 -08:00
ea0a17f785 2022-12-03 nightly release (9a1c6fd50664a46f0bacac92a10bcc26f307fadb) 2022-12-03 00:03:05 -08:00
82ab7407b3 2022-12-02 nightly release (cf3c3f22804be6909e54fc09e07f891ab0886774) 2022-12-02 07:14:00 -08:00
203213d3bf 2022-12-02 nightly release (f623b123f00358936932e103131bb6e3fc8ea685) 2022-12-02 00:03:06 -08:00
d8006fa84d 2022-12-01 nightly release (b399acd2dd755d7184538a58f933fbd48593c158) 2022-12-01 00:03:03 -08:00
30b87f46b5 2022-11-30 nightly release (12f98f85bc7effd48f4728dcaba6362c8b2dd697) 2022-11-30 00:02:57 -08:00
c528648f56 2022-11-29 nightly release (2b522670d2840522e0eeb87afec7f1956eaa8748) 2022-11-29 00:02:58 -08:00
77d59b2eab 2022-11-28 nightly release (db1f1144f1303db45e0b9d96e4bb6bdd87c80e5a) 2022-11-28 00:02:53 -08:00
f8506fb341 2022-11-27 nightly release (39449ea61d9a6644731687219282f610cbf7cf54) 2022-11-27 00:02:54 -08:00
21825b268c 2022-11-26 nightly release (591dfffa38848de54b7f5f4e49260847024c9281) 2022-11-26 00:03:03 -08:00
b8162a48b9 2022-11-25 nightly release (95ea47ef0c1cffe1fe05cc36bdc47c26cc72f13e) 2022-11-25 00:02:59 -08:00
c60a6bb340 2022-11-24 nightly release (903ae4570e401e5c4e42dc4a44cae37f805044a4) 2022-11-24 00:03:00 -08:00
db1da1f2ca 2022-11-23 nightly release (52669534438db3d680def4c70cb03b7e27566d7e) 2022-11-23 00:02:57 -08:00
f44ba7d75d 2022-11-22 nightly release (40cf214f2d18b3b8af5354ddc5dad8156ea32520) 2022-11-22 00:03:06 -08:00
e290a47a82 2022-11-21 nightly release (1856fa5df7fda9950da26eff2ef885e845bf6b6c) 2022-11-21 00:03:03 -08:00
89e792c046 2022-11-20 nightly release (6afe341276f9ffa660446c5fa15b68558791869a) 2022-11-20 00:02:54 -08:00
3806b657b7 2022-11-19 nightly release (419ef2cdcfe84442de5232739284c6a51a18632f) 2022-11-19 00:02:50 -08:00
d7eadde6dc 2022-11-18 nightly release (6f609dd0e03e11395cc637a34abd68472e5a1e12) 2022-11-18 00:02:52 -08:00
0015b0d515 2022-11-17 nightly release (c3acb9c8859fb5cfa1959ee49849f07942c40ccc) 2022-11-17 00:03:02 -08:00
0662e90840 2022-11-16 nightly release (e2f0648750f2d0d0ac648728ce4c514db178cfa1) 2022-11-16 00:03:15 -08:00
b5ea3fd1ef 2022-11-15 nightly release (73d71ae3d62607f2e480af37c470375ea405eb1c) 2022-11-15 00:03:36 -08:00
b7c4176df3 2022-11-14 nightly release (06486cd0087200e08ebb8a9518e064251c7c5309) 2022-11-14 00:03:41 -08:00
637228bcc4 2022-11-13 nightly release (46796fe5e9b74602d45927304773fdcda1c3215a) 2022-11-13 00:04:06 -08:00
580b16fa9b 2022-11-12 nightly release (7c3adddd6c3fe1bda4a9e5bfb9f992a802329551) 2022-11-12 00:03:49 -08:00
aeacdaed41 2022-11-11 nightly release (a6d72f44a4e8b6e9d2e878f30fd8b1d3e1197f0e) 2022-11-11 00:03:54 -08:00
e02bd6d2e4 2022-11-10 nightly release (e87c79ca0cbab476a7d09853b5830b615a62f679) 2022-11-10 00:03:05 -08:00
3b29687f0f 2022-11-09 nightly release (a7420d2ccb62d005f2e1853cfef8d25eb7748a90) 2022-11-09 00:03:19 -08:00
93f15b1b54 2022-11-08 nightly release (ed97e0aa2918e687309ee9a146c8294aefb237d2) 2022-11-08 00:02:58 -08:00
d6f7b88fc6 2022-11-07 nightly release (eda247ee6ce2f8bc29d86ec94f3863f929a2ea6e) 2022-11-07 00:03:09 -08:00
027d0aef88 2022-11-06 nightly release (0e3031f7e76fbd84e62650642dc334c11cc3c511) 2022-11-06 00:02:53 -07:00
12d78465eb 2022-11-05 nightly release (957a9b63c5c2953da3a1d1fc86c20703c96b2fa6) 2022-11-05 00:03:02 -07:00
4ebaafab95 2022-11-04 nightly release (70b00b13830c8adbaa2db8f61d475c2458b707c4) 2022-11-04 00:03:04 -07:00
a374be6433 2022-11-03 nightly release (4c20c0509d5cf8d4dea83cc330056044a6277b1b) 2022-11-03 00:04:06 -07:00
ded1b03613 2022-11-02 nightly release (95fc0bcaaddc2d24e8759f24dbefa789d04e9e42) 2022-11-02 00:09:24 -07:00
43ff4c443a 2022-11-01 nightly release (afd00673b6dedbdb811cfb1a9078deee1cb53f38) 2022-11-01 00:05:13 -07:00
7ceeba2c87 2022-10-31 nightly release (d13f1e6ab4d20451f7e2acd87571ffa7fece0c32) 2022-10-31 00:02:52 -07:00
d6a7672424 2022-10-30 nightly release (e7b854fae9ff8116eaf4aeb24e04cac550bed362) 2022-10-30 00:02:54 -07:00
7623f24837 2022-10-29 nightly release (c5cb6ec06619a2fc9874b967f11d13663c5d32c1) 2022-10-29 00:02:57 -07:00
8d1914359f 2022-10-28 nightly release (fd27246c16d8a80e7de0ccc86d014f9759611b0f) 2022-10-28 00:03:05 -07:00
7ecffceed4 2022-10-27 nightly release (21bef8e944c90cdf98c2ead4369410db252944e1) 2022-10-27 00:03:46 -07:00
3fa360a3c8 2022-10-26 nightly release (0d13ffbbae0ae12e72ed8856ccdd822bf840344c) 2022-10-26 00:03:07 -07:00
dfd50c05fc 2022-10-25 nightly release (3f94adc1056b541851422f887149d54756ed91c1) 2022-10-25 00:03:10 -07:00
9c14af470f 2022-10-24 nightly release (233305a852e1cd7f319b15b5137074c9eac455f6) 2022-10-24 00:03:41 -07:00
21d5156225 2022-10-23 nightly release (efdd43d5193435206fbe76cecc294961d10558db) 2022-10-23 00:03:06 -07:00
8e7fb10812 2022-10-22 nightly release (8461460d55c2474b236a5d7198067ed299631b76) 2022-10-22 00:03:46 -07:00
6418c7d4ba 2022-10-21 nightly release (ff43288d31ea7f3de69f4907e2a36455c742d9c9) 2022-10-21 01:01:25 -07:00
aab82cf32e 2022-10-20 nightly release (472bdb3aa84678b2faa4afe1cb5757f55e14ed9a) 2022-10-20 00:03:02 -07:00
f2aafd1c40 2022-10-19 nightly release (c471c29fdccc3fe48a78083c638a4a88559488b4) 2022-10-19 00:03:50 -07:00
bd3e718742 2022-10-18 nightly release (4814270708cb6141c1fb6202f883c084c71290b4) 2022-10-18 00:03:48 -07:00
cb329eb0ba 2022-10-17 nightly release (364a9973cab8e7458abd27e3926168978fe5428e) 2022-10-17 00:02:45 -07:00
d20ebc91e3 2022-10-16 nightly release (a0c2a7f2eda788a48f1d243940297f1467faf138) 2022-10-16 00:02:54 -07:00
1edc91820e 2022-10-15 nightly release (5210fab64d4322438ebfd8ec9c1170d5effab0a3) 2022-10-15 00:03:45 -07:00
987a944470 2022-10-14 nightly release (f451e824f39516f503c2bdfd785d254b447b9557) 2022-10-14 00:02:58 -07:00
f4ae3bad46 2022-10-13 nightly release (48c648d75df4a2d02ede71f34c11b7f48c80da0e) 2022-10-13 00:03:32 -07:00
2c2a7b256d 2022-10-12 nightly release (25725fd62448165b91647304c26d676db22b6955) 2022-10-12 00:02:49 -07:00
027a1549b9 2022-10-11 nightly release (ad2b04c39c41949d8869de743736bcaeec2dfa0d) 2022-10-11 00:03:23 -07:00
04f63b02a1 2022-10-10 nightly release (c89d286af633a802226c34ccbdd5c7c4be10dcfb) 2022-10-10 00:02:57 -07:00
0dbefb2414 2022-10-09 nightly release (92ac84c98a19310885f3d818aba56b981940d615) 2022-10-09 00:02:53 -07:00
95911a478f 2022-10-08 nightly release (a95889ba7c1ecd8cb0f90507a6152cb035bcefd1) 2022-10-08 00:03:00 -07:00
a23db27756 2022-10-07 nightly release (b14f1d7bb855834ec5f2d3996746e048ba835d69) 2022-10-07 00:02:47 -07:00
99cb9ab0a2 2022-10-06 nightly release (bebd1622490becd09de97003bd22761e973d3edd) 2022-10-06 00:03:11 -07:00
03587d440b 2022-10-05 nightly release (ac25c210e5452d360fcc8cf5ea96c85756e3e370) 2022-10-05 00:02:55 -07:00
9f3d8fec57 2022-10-04 nightly release (82d9592f1baaf943b81bca13a51d655139f050aa) 2022-10-04 00:03:13 -07:00
57d0543a3f 2022-10-03 nightly release (b26eafec079a18bc331f569a7e35497129feed71) 2022-10-03 00:03:10 -07:00
60de01321d 2022-10-02 nightly release (92c2295ab4b5ccdedcc32227c1125a4daf9e2759) 2022-10-02 00:03:40 -07:00
7ac93c0c8f 2022-10-01 nightly release (05d1128106e50075b0fd7d667680214ace34306c) 2022-10-01 00:02:54 -07:00
3bf7094ddb 2022-09-30 nightly release (95681929e4c379c504d8a7761f8104118a5a16db) 2022-09-30 00:02:59 -07:00
7a07ea9138 2022-09-29 nightly release (7628603aeeeb8ed160c2479f75175bb3ea028a42) 2022-09-29 00:02:52 -07:00
5b04269af6 2022-09-28 nightly release (795028a3cec2603a750bdc02ab2b93329f43e883) 2022-09-28 00:02:58 -07:00
04bb9533d5 2022-09-27 nightly release (b360d66391f03a0d5dc2c9a7aff496324b75aa2f) 2022-09-27 00:03:05 -07:00
193cc14176 2022-09-26 nightly release (9c036aa112b0a8fd9afb824d1fda058e2b66ba1d) 2022-09-26 00:03:02 -07:00
69168261cd 2022-09-25 nightly release (db40fbdee03920944219588464d38774ca0b3d05) 2022-09-25 00:02:56 -07:00
6dc8fba854 2022-09-24 nightly release (2b6d2cad29fc1652f80199d647306b9c7c841ca9) 2022-09-24 00:02:52 -07:00
114d9a2be3 2022-09-23 nightly release (cf0de77c2cfb8843b8ae67e6a6f053e6bf6bb3d9) 2022-09-23 00:02:57 -07:00
62b4e98396 2022-09-22 nightly release (99ad8a304898de8bf1e20a6fc12e335e9b7c5064) 2022-09-22 00:02:55 -07:00
8b575a966c 2022-09-21 nightly release (cf2f552cd8a41f4913c370c15804173a3b56a415) 2022-09-21 00:02:54 -07:00
b472915d42 2022-09-20 nightly release (e41d758e26bd2de00e9dd50e94e878f46f9f1b88) 2022-09-20 00:02:50 -07:00
1b1d4240c3 2022-09-19 nightly release (9024015adf01d93fd2533c71fa1e7f06831c2ac7) 2022-09-19 00:02:58 -07:00
39579e7294 2022-09-18 nightly release (12a19a4846c924e9d1e2d37fa0a706fb8eaef9a7) 2022-09-18 00:02:53 -07:00
e42e8dc76f 2022-09-17 nightly release (d6c2080eb49ccaaf43cff37b7f07a85906250b92) 2022-09-17 00:02:58 -07:00
089b57935b 2022-09-16 nightly release (0ec19db7ac88e307135100ddcfc418ae3925844f) 2022-09-16 00:02:51 -07:00
a5ce01692c 2022-09-15 nightly release (625e44c1df211d6753609a9b391cb10f2f94367f) 2022-09-15 00:03:03 -07:00
f35657efc9 2022-09-14 nightly release (55ca297d4e048c641d149a76f2fda7c9ce630ff6) 2022-09-14 00:02:52 -07:00
0feda8a4ba 2022-09-13 nightly release (2fbc0fab20d4af520f69f158f8777e99ad761e1d) 2022-09-13 00:02:48 -07:00
82246b020e 2022-09-12 nightly release (5f960db0e01839f1de8735060b374ea6cbd1713a) 2022-09-12 00:02:48 -07:00
6dd155f968 2022-09-11 nightly release (5f960db0e01839f1de8735060b374ea6cbd1713a) 2022-09-11 00:02:51 -07:00
3bcb7fae2d 2022-09-10 nightly release (96e4bd950027a2f472fafa98616c92403a890bd2) 2022-09-10 00:02:56 -07:00
f708f8d227 2022-09-09 nightly release (dbdc1cd590169576cfb78008f33b7cc795150729) 2022-09-09 00:02:53 -07:00
16ac031e95 2022-09-08 nightly release (e0229d6517385a98afeadbc6391d3592d5027c63) 2022-09-08 00:02:53 -07:00
e3c0d6d5b3 2022-09-07 nightly release (06ebe2d5bc1055f226f56ed2fe26a29038a466e5) 2022-09-07 00:03:03 -07:00
37d93c97bc 2022-09-06 nightly release (1a16b2576f69383480e8be889531e4f574356c62) 2022-09-06 00:02:49 -07:00
83d53043a3 2022-09-05 nightly release (9a6aa9053f79127721875e371addd9c3baeaaac0) 2022-09-05 00:02:50 -07:00
a3ee509f45 2022-09-04 nightly release (e46c1c7931da2d723a6cad4ec307ff4ed4e9cb7f) 2022-09-04 00:02:57 -07:00
a70ad97d6c 2022-09-03 nightly release (2a332afbf41b68080a9436e910b93af7cd336fbc) 2022-09-03 00:02:51 -07:00
e0735fd259 2022-09-02 nightly release (97b2dff60081e1092cfd6d1b3a80c995ff3d6148) 2022-09-02 00:02:44 -07:00
467eb02f43 2022-09-01 nightly release (693ed8b14777d1515c18653f5f8f28a602898662) 2022-09-01 00:02:48 -07:00
325dcc2a47 2022-08-31 nightly release (71d99662a0d7f8a9ad68999c9a014b71591cbb68) 2022-08-31 00:02:57 -07:00
e47d389287 2022-08-30 nightly release (df98c529480b2ece3809b19fc850f57d2054605a) 2022-08-30 00:02:48 -07:00
b044e4c13d 2022-08-29 nightly release (533203f5aaa9f8987f25d828e1c37e755a2ba4ea) 2022-08-29 00:02:46 -07:00
8cdff0bbfb 2022-08-28 nightly release (c9b144ff47ff3b6f358752976d29ac61f2b9b070) 2022-08-28 00:02:45 -07:00
40043c8960 2022-08-27 nightly release (e33897cb9999f124bce126c7e43f96c0755413ef) 2022-08-27 00:02:48 -07:00
733c645ec1 2022-08-26 nightly release (15b560a5c4d638c82e738f3496e2faf95fc328a5) 2022-08-26 00:03:00 -07:00
b7c5e1bc42 2022-08-25 nightly release (b21a6ff6397b74c148c12e4fc41ef12b382443e2) 2022-08-25 00:02:45 -07:00
503de157b7 2022-08-24 nightly release (754d7f05b6841e555cea5a4b2c505dd9e0baec1d) 2022-08-24 00:02:56 -07:00
a0ea0f5fc9 2022-08-23 nightly release (b29a074882a2194d61f1cd7ccf939618d8384d08) 2022-08-23 00:02:51 -07:00
85a04f1c91 2022-08-22 nightly release (765fd77d9a96983e1a2adf496ac2fe66b4825f45) 2022-08-22 00:02:45 -07:00
5c72ae78f9 2022-08-21 nightly release (c9475fa927ef5557aa54e4e9a7bc2a9ab98cdcf7) 2022-08-21 00:02:41 -07:00
1af6aa6752 2022-08-20 nightly release (9732a7d84ee72521d006c9617430c4415016daef) 2022-08-20 00:02:48 -07:00
ba442f621d 2022-08-19 nightly release (61b2cde5270986476f47b58b984de80d02aac321) 2022-08-19 00:02:49 -07:00
2c8f75b138 2022-08-18 nightly release (7263450c309443a8fd3f8ab29fbc04c35692e58f) 2022-08-18 00:02:45 -07:00
89a0e3da4a 2022-08-17 nightly release (343b5f86512f75f8f3bd4b90749c0459743b9e72) 2022-08-17 00:02:53 -07:00
937b90ea32 2022-08-16 nightly release (641d75d0ba0053816a73a6c977ac4a2d6e00e896) 2022-08-16 00:02:55 -07:00
c979188ed7 2022-08-15 nightly release (fbe8c77427ea95eb76876f20239e9493f2a1d6ec) 2022-08-15 00:02:58 -07:00
202caa35b2 2022-08-14 nightly release (408fa38f33d10e02fe762ef9d444ec9fc4f41ffc) 2022-08-14 00:02:53 -07:00
d773851902 2022-08-13 nightly release (c2808571bf823a26e97231d83f5f3f2c5e3d3fe3) 2022-08-13 00:02:44 -07:00
eb92c0d89f 2022-08-12 nightly release (2ca721cda54bcaee2202819bee92088ae2320ebf) 2022-08-12 00:02:44 -07:00
1cdc061e46 2022-08-11 nightly release (3aeb5e4ff9d56ecd680401cfa3f23e97a279efbe) 2022-08-11 00:02:45 -07:00
8a8283d468 2022-08-10 nightly release (e1007950484aa1df4a2f87c9c14b514ffd7736a5) 2022-08-10 00:02:48 -07:00
61f112cb3a 2022-08-09 nightly release (9e65e93c39238ec05aa7913693d7c3e4523bf257) 2022-08-09 00:02:55 -07:00
2085de7d43 2022-08-08 nightly release (1d56ea5e92b7bd041cf84ba9e0bb73f47a7eacf9) 2022-08-08 00:02:44 -07:00
c506576280 2022-08-07 nightly release (4f255dbfb320e8b30f83b35dd4a40c8abd3164fc) 2022-08-07 00:02:54 -07:00
5244615fad 2022-08-06 nightly release (86437b8631918aa4e766df869bca43853f0be6e9) 2022-08-06 00:03:07 -07:00
2869946265 2022-08-05 nightly release (802a4fd28620f03d6537374953dbb4bb887a47b4) 2022-08-05 00:02:47 -07:00
a2b3a3801c 2022-08-04 nightly release (1164c83c3c3070c82618350d57c7bf65ec56f0f1) 2022-08-04 00:02:53 -07:00
881e41c398 2022-08-03 nightly release (9647bec0ec41176adfd0cd5b80ab72c5bf6b2141) 2022-08-03 00:02:54 -07:00
a05de36989 2022-08-02 nightly release (c6b1e096afeabcc118c2e74db6709a1b4c531c53) 2022-08-02 00:03:06 -07:00
f059422a11 2022-08-01 nightly release (53f56894ae0b747d2833ee168599afcfdd4be471) 2022-08-01 00:02:55 -07:00
4a3790b3f0 2022-07-31 nightly release (32cf6c6fb0d98f747f576f52abe533449ae5da84) 2022-07-31 00:02:43 -07:00
f623d1fb75 2022-07-30 nightly release (dc53cb4536839d11ab9a32ec10a0ff0dd96a590f) 2022-07-30 00:02:47 -07:00
5e354516c9 2022-07-29 nightly release (357b7d589c9c451f4850449b9313d8b857ae7140) 2022-07-29 00:02:43 -07:00
053ea30ff8 2022-07-28 nightly release (7f7c81c5f9c4ef92085c1081dfb0f037f768bf27) 2022-07-28 00:02:46 -07:00
27064dc6e1 2022-07-27 nightly release (3cf9c3d8769fa3509c190a4f4c3dacd61ba154af) 2022-07-27 00:02:42 -07:00
983d26963e 2022-07-26 nightly release (1223e94469682271c94ac3b6a0cbe31a3078e351) 2022-07-26 00:02:44 -07:00
f0bd47f927 2022-07-25 nightly release (64094d81fe3cf512d3dfc6b2aabfd1754c86e87d) 2022-07-25 00:02:52 -07:00
63efe6b923 2022-07-24 nightly release (194255bb56a7bdcb5e6fb236d82b4897c2e5967c) 2022-07-24 00:02:41 -07:00
c747033c31 2022-07-23 nightly release (35d97e21c8e2e28fae4a4744ce42f1544972ba1f) 2022-07-23 00:02:52 -07:00
dae37da22c 2022-07-22 nightly release (12cb26509a30077f4bd11972dea9baa03c8d943f) 2022-07-22 00:02:47 -07:00
d7d8cf9170 2022-07-21 nightly release (2fb2740ef976ab6f139c5ccfa17c5c265bd2f78c) 2022-07-21 00:02:37 -07:00
bb7af45ebb 2022-07-20 nightly release (589e8a1da5aefeb27c2187b8f0d11471edb10153) 2022-07-20 00:02:46 -07:00
ab98dd1924 2022-07-19 nightly release (a8f4011e90aeffac0757bfdb971b139e1d348967) 2022-07-19 00:02:52 -07:00
8e9ce913ff 2022-07-18 nightly release (471397d0ee27b61ae1a088ece9f3ead1562152e0) 2022-07-18 02:51:54 -07:00
6f50b0393b 2022-07-18 nightly release (86b86202b58a6e3e7ccde32c51f81eff2e4e6445) 2022-07-18 00:02:28 -07:00
64c005dbd7 2022-07-17 nightly release (0b5b10002af2dce031d4fad337728f81c58a8b10) 2022-07-17 00:02:36 -07:00
468bcbe765 2022-07-16 nightly release (00359ff886e69f064360e5e8c6f2d5c306a105f1) 2022-07-16 00:02:48 -07:00
888ece62d5 2022-07-15 nightly release (75fdebde6290e2955129e665c2b0d99c391542a7) 2022-07-15 00:02:49 -07:00
c77e5b3ace 2022-07-14 nightly release (12c30a8250d1a00a0ac2e03d70ec909c76d922c3) 2022-07-14 00:03:12 -07:00
608f835654 2022-07-13 nightly release (07f4dc9b2c4cf6a00aa12cfb1d5db9056475ae98) 2022-07-13 00:02:53 -07:00
564e65cf0a 2022-07-12 nightly release (28776c45e393ec7d6731a5613f5d79d309036d3a) 2022-07-12 00:03:24 -07:00
4423cee876 2022-07-11 nightly release (3c2199b159b6ec57af3f7ea22d61ace9ce5cf5bc) 2022-07-11 00:02:50 -07:00
1ccfb24e91 2022-07-10 nightly release (f3008be9004418c3651bf89de764355580b994a2) 2022-07-10 00:02:37 -07:00
f1ef8f4871 2022-07-09 nightly release (39f659c3baa491cc060f7df6b55354bd78c30be1) 2022-07-09 00:02:45 -07:00
c10163e770 2022-07-08 nightly release (8389ccbcd809e2596bd4f8a20d069fc919beefdf) 2022-07-08 00:03:18 -07:00
19bac0d4c9 2022-07-07 nightly release (74877943b84cb6d3fc82993ebc8f0e2f8b3c5ce1) 2022-07-07 00:04:30 -07:00
76c411ec8d 2022-07-06 nightly release (fe361dede45747067e39669573cbcf48be4fe4f9) 2022-07-06 00:03:00 -07:00
5728ca13ae 2022-07-05 nightly release (828c787ea98da39eb786925eedcb8527aae07153) 2022-07-05 00:03:21 -07:00
4492525e2e 2022-07-04 nightly release (19f3d4d7950daef5c0e93a7c6da37353381da3d9) 2022-07-04 00:02:45 -07:00
14908b1f7d 2022-07-03 nightly release (19f3d4d7950daef5c0e93a7c6da37353381da3d9) 2022-07-03 00:02:44 -07:00
fa0f6bad6d 2022-07-02 nightly release (19f3d4d7950daef5c0e93a7c6da37353381da3d9) 2022-07-02 00:02:33 -07:00
b12db36e77 2022-07-01 nightly release (ffae7308c9bfbe52964577e35b3c1fdbcfb28e66) 2022-07-01 00:02:38 -07:00
67434681aa 2022-06-30 nightly release (331c0c18033bf4138c9ea468a8c759865dd8ffff) 2022-06-30 00:02:54 -07:00
5452b8baa5 2022-06-29 nightly release (5943aaa0c45bddef9b577cd0483627144585af0d) 2022-06-29 00:02:41 -07:00
2010b6df9a 2022-06-28 nightly release (8a45ef23f564cc092fa0805ea6c0f8f73eed209c) 2022-06-28 00:02:22 -07:00
3245693a02 2022-06-27 nightly release (80b50dfa3ab16de3e90dab8eeed003a98a0da1fe) 2022-06-27 00:02:44 -07:00
5074dab26d 2022-06-26 nightly release (80b50dfa3ab16de3e90dab8eeed003a98a0da1fe) 2022-06-26 00:02:31 -07:00
7b6fd33c64 2022-06-25 nightly release (80b50dfa3ab16de3e90dab8eeed003a98a0da1fe) 2022-06-25 00:02:45 -07:00
2befd0b446 2022-06-24 nightly release (55aa0b1a840e7bf5c53c8e457abf85ded92317eb) 2022-06-24 00:04:40 -07:00
f0e1176a37 2022-06-23 nightly release (9f29d09b3bfbb6c4b64605703f52d7e2fe1fea7f) 2022-06-23 00:02:29 -07:00
4de7faed35 2022-06-22 nightly release (bd7cd7c3569bb708207a80c704355af2ca42b834) 2022-06-22 00:02:50 -07:00
766cc00b15 2022-06-21 nightly release (6d6e77eb6b9649aeb1f21b59145391f28e136a40) 2022-06-21 00:02:35 -07:00
3f72225847 2022-06-20 nightly release (332e43ed1a353a8b0a1d8e0167daaaed1e8b7a34) 2022-06-20 00:02:40 -07:00
c296168bd0 2022-06-19 nightly release (0545c85f744cadc6f06616f15748052cd7cebe20) 2022-06-19 00:02:27 -07:00
c4171aed91 2022-06-18 nightly release (1432a3d6acc7ac58269e5ea7ef6b2373e9a3dbf6) 2022-06-18 00:02:36 -07:00
4c19eece3f 2022-06-17 nightly release (4a4890cfb2440c497fd41e69405ad623c144e32b) 2022-06-17 09:52:36 -07:00
8115b705b3 2022-06-17 nightly release (9d419e81e2cf64504126652def4eec46b72b75c5) 2022-06-17 00:02:34 -07:00
a4fa62064e 2022-06-16 nightly release (86606fbe22c76d71851a38003462e9b6428b47d3) 2022-06-16 00:02:52 -07:00
39c39325d1 2022-06-15 nightly release (bad7720dde598d4300b6523e2c06e61923a2820a) 2022-06-15 00:02:37 -07:00
4981166f3d 2022-06-14 nightly release (18305e30a785a0336837a3e4406b84e3bf36e727) 2022-06-14 00:02:26 -07:00
b6cec31acb 2022-06-13 nightly release (a732bbea232fa32191f259d7cb15e9fabb6c2926) 2022-06-13 00:02:38 -07:00
bf5a87c628 2022-06-12 nightly release (66460c4a6ab0dc29187618bdf0a3d097bbb0ad21) 2022-06-12 00:02:36 -07:00
74ee629516 2022-06-11 nightly release (38350acf8fddc350c893e23a8e77bed2e59f5b06) 2022-06-11 00:02:33 -07:00
92b4285a8d 2022-06-10 nightly release (70d6446a3def513b8082d243d7996ef86c2787a6) 2022-06-10 00:03:19 -07:00
18fa2ad3f3 2022-06-09 nightly release (bfaa187fb083aea66804673de033b17506061de2) 2022-06-09 00:02:53 -07:00
5ab96cd7be 2022-06-08 nightly release (d09e3674d875060c1a848f9634ceff7300fb7c04) 2022-06-08 00:02:33 -07:00
12aaa27135 2022-06-07 nightly release (b40c751c33439bb8721fddb85c25b893ba73ada7) 2022-06-07 00:02:55 -07:00
22aa5e65d2 2022-06-06 nightly release (e6cc2e8d384d3e045b1529c89bcb0460883691e9) 2022-06-06 00:02:46 -07:00
17bd418fc3 2022-06-05 nightly release (7860ce5b79f28d601f31aeade400d9873137fee4) 2022-06-05 00:02:50 -07:00
9c02239648 2022-06-04 nightly release (7860ce5b79f28d601f31aeade400d9873137fee4) 2022-06-04 00:02:36 -07:00
6593c7bbc1 2022-06-03 nightly release (4bb8db85e924b1850d5cf1709a2a0bb7deca603d) 2022-06-03 00:02:43 -07:00
7ba52788aa 2022-06-02 nightly release (4bb8db85e924b1850d5cf1709a2a0bb7deca603d) 2022-06-02 00:02:36 -07:00
b7577f4d6a 2022-06-01 nightly release (01d20491ff2034f283455a59d8faa9b11c43a10c) 2022-06-01 00:02:46 -07:00
d76af8f41c 2022-05-31 nightly release (017b0ae9431ae3780a4eb9bf6d8865dfcd02cd92) 2022-05-31 00:03:03 -07:00
0cac4ed4c7 2022-05-30 nightly release (18273c39da4880b68f8a546978a23516fe5c3e51) 2022-05-30 00:10:03 -07:00
447b0d9521 2022-05-29 nightly release (18273c39da4880b68f8a546978a23516fe5c3e51) 2022-05-29 00:02:36 -07:00
555bfd9e65 2022-05-28 nightly release (18273c39da4880b68f8a546978a23516fe5c3e51) 2022-05-28 00:02:39 -07:00
3038818d93 2022-05-27 nightly release (d98a8148b66fb6f74ce1b437d6bde4f24208bb78) 2022-05-27 00:02:45 -07:00
367ce697da 2022-05-26 nightly release (032d1ace1de47e93edcc69b06da9d79636652991) 2022-05-26 00:02:41 -07:00
281c99ca98 2022-05-25 nightly release (d450034f240681f5133bc61ddc9c0d195c527b59) 2022-05-25 00:02:50 -07:00
8f263fda5c 2022-05-24 nightly release (c7ce4fcc619fab5c82071eac934b505b396ee015) 2022-05-24 00:03:09 -07:00
56d66c84c6 2022-05-23 nightly release (d4345ed0a6c06b1e489e41c219f94d26d3014ce6) 2022-05-23 00:02:35 -07:00
5db8a74f9a 2022-05-22 nightly release (5f1b0a4f48bad11907031ff473b99558ae703593) 2022-05-22 00:03:23 -07:00
00c4d7fa54 2022-05-21 nightly release (47834679ba2f869e66450a74e2add4c04f0006e9) 2022-05-21 00:02:31 -07:00
f5a90b859c 2022-05-20 nightly release (b3e7230efa30e03b9ee8ff3e2b31c38754a6b2b1) 2022-05-20 00:02:54 -07:00
23ce60e364 2022-05-19 nightly release (4124307faeed923175d3e02fa685c0788e99f0ee) 2022-05-19 00:02:44 -07:00
2b57412cef 2022-05-18 nightly release (05ce0f9be63dd6fadd2fb40c29f8f867f267002b) 2022-05-18 11:50:31 -07:00
3e449774fe 2022-05-18 nightly release (7f1e331b34fd0b9caba12aa86469b59366cde9a4) 2022-05-18 00:02:44 -07:00
bd39de2c81 2022-05-17 nightly release (6aea0b10734df77252a2f67315b5c447c263ea6c) 2022-05-17 00:02:47 -07:00
c6efbc72a2 2022-05-16 nightly release (88205886d72486fbeac420c7df99bfe460c38436) 2022-05-16 00:02:33 -07:00
4f32ac4356 2022-05-15 nightly release (e175065c4e31f33247eec4edf026c8ee020d943e) 2022-05-15 00:02:31 -07:00
6a62a087bb 2022-05-14 nightly release (3cade9d454babc70f43ace1ce6481543104934b2) 2022-05-14 00:02:33 -07:00
44bf440b53 2022-05-13 nightly release (65f71c0cbeb080c13e927d37b0d23d39bac6f092) 2022-05-13 00:02:35 -07:00
24898298be 2022-05-12 nightly release (188854eeaf148b685e54a438fa7b9a20d3a28e33) 2022-05-12 00:02:45 -07:00
794df1af9a 2022-05-11 nightly release (fc4f8b5edec69c0ddc63e5210d3f34724d4bd672) 2022-05-11 00:02:36 -07:00
b99396932d 2022-05-10 nightly release (4ceac494259e6b7d57e721793fb0a9ad598a0e12) 2022-05-10 00:02:33 -07:00
e6f2163016 2022-05-09 nightly release (e3dcd175f7aacb6aa2fb150b01335afd508c9d52) 2022-05-09 00:02:29 -07:00
19998c0c4c 2022-05-08 nightly release (e3dcd175f7aacb6aa2fb150b01335afd508c9d52) 2022-05-08 00:02:26 -07:00
60ac495151 2022-05-07 nightly release (901cb7c2e40123822c2d8e5cf4c5af782a4e22a1) 2022-05-07 00:02:33 -07:00
06611fcdd8 2022-05-06 nightly release (040e2e04dd35c49e2b0faeeca964333f7cbc45b9) 2022-05-06 00:03:53 -07:00
0328afd12e 2022-05-05 nightly release (4baf7c0899a2fa9c3630613f37d5fc65971db21c) 2022-05-05 00:02:34 -07:00
1d34bdd21e 2022-05-04 nightly release (201ddafc22e22c387b4cd654f397e05354d73d09) 2022-05-04 00:02:28 -07:00
5a8679f89b 2022-05-03 nightly release (201ddafc22e22c387b4cd654f397e05354d73d09) 2022-05-03 00:02:35 -07:00
fd1f1d0d1b 2022-05-02 nightly release (201ddafc22e22c387b4cd654f397e05354d73d09) 2022-05-02 00:02:41 -07:00
629bca39ad 2022-05-01 nightly release (201ddafc22e22c387b4cd654f397e05354d73d09) 2022-05-01 00:02:30 -07:00
13015c5b75 2022-04-30 nightly release (2540f866ff1eff10dbed7ca47ea9c432e8583da2) 2022-04-30 00:02:31 -07:00
e19fc28ab6 2022-04-29 nightly release (ac31e5d4a3ba20315b5bae26411252c72e0f5c0b) 2022-04-29 00:02:33 -07:00
32808706c5 2022-04-28 nightly release (177ea46332fcde48df5ce0c0efe1552991a5548a) 2022-04-28 00:02:33 -07:00
5a835234d2 2022-04-27 nightly release (ec62901a2c38b63d12843e0f079bdeb7644d8714) 2022-04-27 00:02:34 -07:00
a4974f7e6e 2022-04-26 nightly release (e48b29b1fb1510535efa2a00b6501d5455556aa2) 2022-04-26 00:02:30 -07:00
7393e51ecc 2022-04-25 nightly release (36420b5e8cce9c783903bbc210ed7f2b6535ebf5) 2022-04-25 00:02:36 -07:00
bbd2ae4d6c 2022-04-24 nightly release (77f23d64607142c0cba47f5148ebc1959c9de366) 2022-04-24 00:02:52 -07:00
5b7df69855 2022-04-23 nightly release (2f2158ae4502667951a4d6f5f95d751c7d0927d8) 2022-04-23 00:02:38 -07:00
acc1284169 2022-04-22 nightly release (80fe96c86057efa768ad1eed6c54af0ad259a423) 2022-04-22 00:02:30 -07:00
c0d0c45456 2022-04-21 nightly release (3e10fe323165a94fed66b1487902e8d394ce3be1) 2022-04-21 00:02:32 -07:00
30beb5e09a 2022-04-20 nightly release (d0af05f931e271131e1e207bcc47235059cfd723) 2022-04-20 00:02:39 -07:00
3a600dd46f 2022-04-19 nightly release (736f0d0e46ad70f2382a9035bc72f491970d43f5) 2022-04-19 00:02:41 -07:00
1d23709c9c 2022-04-18 nightly release (f4d89aa28e182bd7768a997a70b033748591ceac) 2022-04-18 00:02:31 -07:00
f827d1bda6 2022-04-17 nightly release (1c5c739993284f908ede18fcd5c1ff79dcadc251) 2022-04-17 00:02:31 -07:00
be96d5d2cb 2022-04-16 nightly release (5dcbcc6de83b552f43415ef60bb2e771bdbc0026) 2022-04-16 00:02:23 -07:00
5cbdc6315b 2022-04-15 nightly release (cc1902a5ed64e5e3b956c5cc6fcbdcd45630f7b8) 2022-04-15 00:02:35 -07:00
053a97bbcb 2022-04-14 nightly release (1118b157bc524944b6094ee0d69adac9ec86931a) 2022-04-14 00:02:33 -07:00
80e358b406 2022-04-13 nightly release (4afe2db641c526800fb8b33532a920be625f3cf4) 2022-04-13 00:02:35 -07:00
80863b0d47 2022-04-12 nightly release (25aa251f37b74ea8354a3e9366912180a6c84613) 2022-04-12 00:02:37 -07:00
3daf56e4fd 2022-04-11 nightly release (a98b4666e0d5b50d876df39a02ae4241d5663ccf) 2022-04-11 14:34:00 -07:00
9afee87fe1 2022-04-11 nightly release (0389f99c4976319f688e936ac520441466f6ab13) 2022-04-11 07:53:19 -07:00
3d8554ce13 2022-04-11 nightly release (caa28ff4959c7cf7165dabf1ffae7c233b8e4b61) 2022-04-11 00:02:30 -07:00
49f28d09b8 2022-04-10 nightly release (f42bdff0166ef503c844353c32b6725391e440ac) 2022-04-10 00:02:29 -07:00
6f47f3883d 2022-04-09 nightly release (58a44523c1f74093ad9a05513a32c45a109b6c55) 2022-04-09 00:02:33 -07:00
02cf8fe543 2022-04-08 nightly release (11f1fef9817bc8a9bc0c3ff7af98ab6cbefe51ed) 2022-04-08 00:02:31 -07:00
8fcdacc23d 2022-04-07 nightly release (ef41201d4acf6b7601577b08b90e36f749db84b8) 2022-04-07 00:02:39 -07:00
59ac7da723 2022-04-06 nightly release (f2a4d49174eb7b705c2c605c45f78d4fa6786be0) 2022-04-06 00:03:18 -07:00
4817424257 2022-04-05 nightly release (b0e047b59da4d6434bf347dd6ae501169218bb1a) 2022-04-05 00:02:42 -07:00
17459dfef2 2022-04-04 nightly release (bf16552617e0f82e6963762b6d40a8ae39105fae) 2022-04-04 00:02:29 -07:00
fa2c3ef58f 2022-04-03 nightly release (bd032cd8d65e36b9d16af42520f5d63e861cb9d4) 2022-04-03 00:02:24 -07:00
4aad5531c0 2022-04-02 nightly release (6efc5c1acfe6afc7268e39d9b058a34e87932560) 2022-04-02 00:04:58 -07:00
2f5ec0f958 2022-04-01 nightly release (c0a6add7eef8ab3715c0f4281b3b7a2a0f1e24b5) 2022-04-01 00:03:50 -07:00
0c6e65a77b 2022-03-31 nightly release (3036a0309dba11d0601d19b5f2d2f4a7e6afa456) 2022-03-31 00:03:40 -07:00
32ec80e619 2022-03-30 nightly release (3269729c683ac01d7e91b109187a69686167bb08) 2022-03-30 00:02:59 -07:00
7313eddbb2 2022-03-29 nightly release (51e50a2ddb9844e0abaf6028fe5e4246448dffff) 2022-03-29 00:03:16 -07:00
ee7c21e4f2 2022-03-28 nightly release (1c5a8125798392f8d7c57e88735f43a14ae0beca) 2022-03-28 00:02:31 -07:00
a9cc98c890 2022-03-27 nightly release (1c5a8125798392f8d7c57e88735f43a14ae0beca) 2022-03-27 00:02:22 -07:00
4e21b8b1d0 2022-03-26 nightly release (90459ba9dc6a812ef74c814d8fd1fa1edaaa27ee) 2022-03-26 00:02:36 -07:00
b7c262acb8 2022-03-25 nightly release (4d82e5bf449929c2732250f8620976e5be96a7f3) 2022-03-25 00:02:40 -07:00
10d252ff4a 2022-03-24 nightly release (d583f9c9d2bd0cc5fb5d369cd498bd62b62c43fe) 2022-03-24 00:03:02 -07:00
6a9d73c33a 2022-03-23 nightly release (65329f4fac8fb22318b7a3eb115e9da207d8d41a) 2022-03-23 00:02:35 -07:00
674a66aea3 2022-03-22 nightly release (7c2103ad5ffdc1ef91231c966988f7f2a61b4166) 2022-03-22 00:02:37 -07:00
3c1c7fd26e 2022-03-21 nightly release (7c2103ad5ffdc1ef91231c966988f7f2a61b4166) 2022-03-21 00:02:48 -07:00
be7f24a66d 2022-03-20 nightly release (38512d90c1380aa7eb66bdfbc52c744bc06b17c6) 2022-03-20 00:02:30 -07:00
2b1020d9c9 2022-03-19 nightly release (b86554abedd9b1bfbca582b1f28e02806a2dfbf8) 2022-03-19 00:02:25 -07:00
46686703a0 2022-03-18 nightly release (6ffe16662f1f4792bf4dc2c2ec35f21b6ff1cc7e) 2022-03-18 00:03:00 -07:00
203c75e473 2022-03-17 nightly release (495e69eaff3037318eafc1b8c2166f11db474a1d) 2022-03-17 00:03:11 -07:00
1a3899dc0e 2022-03-16 nightly release (ac3effd150e829da8bac7525adadec2fad31ece1) 2022-03-16 00:03:08 -07:00
7fc88a1335 2022-03-15 nightly release (060f1b822a6b71e1db07b94f68fdfb53fe47a7d0) 2022-03-15 00:02:26 -07:00
6dc1221394 2022-03-14 nightly release (ef9023e93dabdcfe328bb55869ded08096e2d993) 2022-03-14 00:02:28 -07:00
9b7dac5e86 2022-03-13 nightly release (ef9023e93dabdcfe328bb55869ded08096e2d993) 2022-03-13 00:02:32 -08:00
08dbe0b800 2022-03-12 nightly release (bbdb758423d8f22cfcae492686cba848e3eadbc9) 2022-03-12 00:02:40 -08:00
7d3f48cab3 2022-03-11 nightly release (794f813522deb8afb5e576cec264ce9224cfe067) 2022-03-11 00:02:29 -08:00
7ed23549dd 2022-03-10 nightly release (d8dbf9d8b2d73c4799555af04d8fdf9af9d027a3) 2022-03-10 00:02:31 -08:00
6874a3d788 2022-03-09 nightly release (beda4e8b2f644434202c3d27eac21b5bf9b82321) 2022-03-09 00:02:28 -08:00
df69a9d17e 2022-03-08 nightly release (07410207c487369d87a03e4602f72ec870abe4b7) 2022-03-08 00:02:26 -08:00
8057557dc7 2022-03-07 nightly release (5167e9d59d76d67f4a240f94600ce6bba2c70ed3) 2022-03-07 00:02:27 -08:00
f1c8cb63b4 2022-03-06 nightly release (5167e9d59d76d67f4a240f94600ce6bba2c70ed3) 2022-03-06 00:02:44 -08:00
59ceb67b45 2022-03-05 nightly release (5167e9d59d76d67f4a240f94600ce6bba2c70ed3) 2022-03-05 00:02:31 -08:00
526746077c 2022-03-04 nightly release (b955a046cd7ac6178e8a23c855a98f3d546dec37) 2022-03-04 00:02:25 -08:00
9b5962b28a 2022-03-03 nightly release (715c000cf107a83583cf6292d61e121562296be9) 2022-03-03 00:02:38 -08:00
32c488b10c 2022-03-02 nightly release (81437e66c1cc02f3c18c60b0d3467fc80a51ae09) 2022-03-02 09:20:52 -08:00
54d1329999 2022-03-02 nightly release (2f957f513e6faebb001d60a544aedd8042ce7026) 2022-03-02 00:02:34 -08:00
e71a0f5247 2022-03-01 nightly release (5e86505693c4c7e518e9e08cfa206c6405df7f67) 2022-03-01 00:02:25 -08:00
4d45de45d4 2022-02-28 nightly release (6688487f3ec22b7e89553b8acd6782264061f5cd) 2022-02-28 00:05:23 -08:00
bc9703b4fe 2022-02-27 nightly release (6688487f3ec22b7e89553b8acd6782264061f5cd) 2022-02-27 00:02:20 -08:00
82fe813e3a 2022-02-26 nightly release (6688487f3ec22b7e89553b8acd6782264061f5cd) 2022-02-26 00:02:20 -08:00
bb2f184fe2 2022-02-25 nightly release (6688487f3ec22b7e89553b8acd6782264061f5cd) 2022-02-25 00:02:36 -08:00
143655fcc9 2022-02-24 nightly release (6688487f3ec22b7e89553b8acd6782264061f5cd) 2022-02-24 00:02:51 -08:00
ecf368e6b2 2022-02-23 nightly release (7807a83f6e677987e8cb0bdbebd04ccdd157b68b) 2022-02-23 00:02:44 -08:00
e8d0a25159 2022-02-22 nightly release (f41db99a5690bf0ae4d51d9cdb9aaff95703f6c6) 2022-02-22 00:02:30 -08:00
47698f9118 2022-02-21 nightly release (f41db99a5690bf0ae4d51d9cdb9aaff95703f6c6) 2022-02-21 00:02:38 -08:00
bbad85aa47 2022-02-20 nightly release (f41db99a5690bf0ae4d51d9cdb9aaff95703f6c6) 2022-02-20 00:03:46 -08:00
74e6ae00b6 2022-02-19 nightly release (1646a0033dda77eca980c40870524996b4ad1f76) 2022-02-19 00:02:40 -08:00
ffd57dc90c 2022-02-18 nightly release (d4f3d07ae251876054f5681afc44e0d6057d851b) 2022-02-18 12:41:03 -08:00
cbab909688 2022-02-18 nightly release (d4f3d07ae251876054f5681afc44e0d6057d851b) 2022-02-18 00:02:37 -08:00
56dccfef91 2022-02-17 nightly release (e0e1e0b114d57542e65a054d05a81618e786fe18) 2022-02-17 15:14:25 -08:00
35e605ec6f 2022-02-17 nightly release (bbac8c9c4816c87eae17a04b60d4103940c08f83) 2022-02-17 00:03:18 -08:00
7e58a2be96 2022-02-16 nightly release (47c69933558040158c1533f1b500d15153236b3b) 2022-02-16 00:02:25 -08:00
e61d57f299 2022-02-15 nightly release (47c69933558040158c1533f1b500d15153236b3b) 2022-02-15 00:02:42 -08:00
19bed12bb3 2022-02-14 nightly release (1776caf361fc2c652f0c00b2fd42714447cefc29) 2022-02-14 00:02:37 -08:00
e83d4e52c0 2022-02-13 nightly release (c73cc92eff3160121973930dca2a2927268dc2c1) 2022-02-13 00:02:31 -08:00
a8ce06044a 2022-02-12 nightly release (444191de5680e7c5526614d94adbbcd7cba985e2) 2022-02-12 00:02:34 -08:00
1222e07cd4 2022-02-11 nightly release (730fef25c71dfb7c7d3f403df48541cc0aac154d) 2022-02-11 00:02:29 -08:00
30eddeb999 2022-02-10 nightly release (239531fb3165fbca13a18febbabff493ea4fd55d) 2022-02-10 00:02:25 -08:00
b362168f8c 2022-02-09 nightly release (4b69a2373f8f77548fba82886ab0dc8c06a38d87) 2022-02-09 00:02:52 -08:00
641a582e7b 2022-02-08 nightly release (224093db113fd333a7cb8e5e07308de5098e4a62) 2022-02-08 00:02:38 -08:00
3053858b8d 2022-02-07 nightly release (5c6b897516ac59775d7d58692fad052c399c4ec3) 2022-02-07 00:02:26 -08:00
d804a55840 2022-02-06 nightly release (5c6b897516ac59775d7d58692fad052c399c4ec3) 2022-02-06 00:03:04 -08:00
49ebad53c5 2022-02-05 nightly release (f2f40ce870488b0cfc6e1d9c1cdea3a95f01a4c1) 2022-02-05 00:02:26 -08:00
e9259fa905 2022-02-04 nightly release (1ad53b51d03d9ff4085bcc02b3a65088ed4a6c80) 2022-02-04 00:02:34 -08:00
e2e9c916a5 2022-02-03 nightly release (58dabebcd746aad95a37bdfc7e60e5d22f0f5641) 2022-02-03 00:02:30 -08:00
72056d08cd 2022-02-02 nightly release (f20fa66f704a61b4570ce372add59c7469f44a96) 2022-02-02 00:03:00 -08:00
b5590f46d3 2022-02-01 nightly release (a319bce58d66d12cd71fbf54a145cf7b7563eebc) 2022-02-01 09:00:43 -08:00
2352fadbfa 2022-02-01 nightly release (8fa5cde3a9fab1f82bbb424312995035f2342eaa) 2022-02-01 00:02:33 -08:00
7ddf106b1f 2022-01-31 nightly release (8fa5cde3a9fab1f82bbb424312995035f2342eaa) 2022-01-31 00:02:57 -08:00
3e25201ab5 2022-01-30 nightly release (8fa5cde3a9fab1f82bbb424312995035f2342eaa) 2022-01-30 00:02:53 -08:00
d53c69395a 2022-01-29 nightly release (9413c0cd3e15c2b38e7df500613cff1e7951699e) 2022-01-29 00:02:29 -08:00
c8381acacb 2022-01-28 nightly release (fb0e27d38a8fdab4e1c14d6378c9e41cb30fd6a3) 2022-01-28 10:47:42 -08:00
573a6ad052 2022-01-28 nightly release (7aa4a1f63e14406807ff6cb45faabf0151a63db8) 2022-01-28 00:02:43 -08:00
aaa24811c5 2022-01-27 nightly release (12e01f78257479f91b0f23d8d93318bc92945c87) 2022-01-27 00:02:29 -08:00
a2f0cefe70 2022-01-26 nightly release (12e01f78257479f91b0f23d8d93318bc92945c87) 2022-01-26 00:02:37 -08:00
4d321910fa 2022-01-25 nightly release (b372be4211d40be92ffbb025cac0038ee3445141) 2022-01-25 00:02:51 -08:00
e5e237cdd2 2022-01-24 nightly release (9b3a56eecfab13877d32f0a5051e1584746668bc) 2022-01-24 00:02:35 -08:00
a031636e60 2022-01-23 nightly release (9b3a56eecfab13877d32f0a5051e1584746668bc) 2022-01-23 00:02:21 -08:00
f7e78364e4 2022-01-22 nightly release (7680a0ae9dce001265c34869af469c55653f8c04) 2022-01-22 00:02:54 -08:00
fe71938284 2022-01-21 nightly release (2eb4b05b94811286593171ad1c6f1f97b36cf665) 2022-01-21 00:02:27 -08:00
121e193266 2022-01-20 nightly release (f45e217c0158c247772f6491a18662cc972c29cd) 2022-01-20 00:03:13 -08:00
6ee1aed356 2022-01-19 nightly release (30739f532940a6dbc19bccb56442e6352bf3fed7) 2022-01-19 00:02:39 -08:00
c199e3cdb7 2022-01-18 nightly release (17540c5c80f5c6cd4e0fee42ec47d881e46f47f9) 2022-01-18 00:02:27 -08:00
1261c6d6bb 2022-01-17 nightly release (1ecfa1d61a3e56aab7f3b69c33fac9e9a0eebb6c) 2022-01-17 00:03:26 -08:00
d949a9746b 2022-01-16 nightly release (1ecfa1d61a3e56aab7f3b69c33fac9e9a0eebb6c) 2022-01-16 00:02:28 -08:00
f1436c776e 2022-01-15 nightly release (c43e0286a9fd234b98a0666c8539b65d0e114139) 2022-01-15 00:02:20 -08:00
050720060a 2022-01-14 nightly release (356af8f857d7c8d5948a438002757c4a4a9c45e8) 2022-01-14 00:02:26 -08:00
19172fdb82 2022-01-13 nightly release (60632a00fe6212159164d92ad69766925ab15c97) 2022-01-13 11:21:32 -08:00
9d73bfd9c8 2022-01-10 nightly release (2bed616e0fb1a937b1bb1eee8e6cf83734e839bd) 2022-01-10 00:02:45 -08:00
8e9a40233c 2022-01-09 nightly release (49a07c892265ed89ed8302db15af4647746f6543) 2022-01-09 00:03:02 -08:00
a7432d187f 2022-01-08 nightly release (0eb2fc608c0ea8c4150a6ba823e80a52e4d9d158) 2022-01-08 00:02:27 -08:00
d097728842 2022-01-07 nightly release (ddea6980fe7ce6168e13710164f06bd1b7ca1674) 2022-01-07 00:04:28 -08:00
65fabd7f04 2022-01-06 nightly release (e1aa5db108deb4f3e0faa95b703d4bbc9f7453b7) 2022-01-06 00:02:29 -08:00
39e21b875b 2022-01-05 nightly release (14457bb8cbaebf501872fc4183376eb6d7e62d81) 2022-01-05 00:03:09 -08:00
aa16a081e0 2022-01-03 nightly release (fa09099ba35fcd42347732ca3a5f8ddaf145da1b) 2022-01-03 00:02:28 -08:00
b526651566 2022-01-02 nightly release (ce86881afadc0fea628c7e47d64a4073f3e09894) 2022-01-02 00:05:04 -08:00
758a1e7d2d 2022-01-01 nightly release (ce86881afadc0fea628c7e47d64a4073f3e09894) 2022-01-01 00:02:20 -08:00
21348f6cdb 2021-12-31 nightly release (7bfaa230be303280740b1f94574fb6bc4aee2758) 2021-12-31 00:02:26 -08:00
d94f86e906 2021-12-30 nightly release (2b00dbbbbcb23dc506072a81d04145fb5f983490) 2021-12-30 00:04:33 -08:00
6d3cc13581 2021-12-29 nightly release (a0c99a8d3bce895c6bb6a220e98ca84274eb33dd) 2021-12-29 00:03:09 -08:00
766e7ac1bf 2021-12-28 nightly release (795af1578ccade97792646e02e5ec3c295385501) 2021-12-28 00:05:39 -08:00
5796475e9f 2021-12-27 nightly release (795af1578ccade97792646e02e5ec3c295385501) 2021-12-27 00:03:03 -08:00
59aa7dd7b7 2021-12-26 nightly release (795af1578ccade97792646e02e5ec3c295385501) 2021-12-26 00:02:26 -08:00
788fb7ea22 2021-12-25 nightly release (795af1578ccade97792646e02e5ec3c295385501) 2021-12-25 00:02:28 -08:00
3264153060 2021-12-24 nightly release (795af1578ccade97792646e02e5ec3c295385501) 2021-12-24 00:02:29 -08:00
81d11862e2 2021-12-23 nightly release (a421ee0e52e57e854656d40df3d8280af27ab4a0) 2021-12-23 00:02:56 -08:00
ec8f98980f 2021-12-22 nightly release (6217fee96bf1dc4e1343935d7ff87053f9bf22e4) 2021-12-22 00:02:42 -08:00
8f2a8717fb 2021-12-21 nightly release (5651e1e3ad435ec45b9435b9d78ff2fbc715fa0b) 2021-12-21 00:02:26 -08:00
4811137a2b 2021-12-20 nightly release (7ea86dfdb162758c9fbbf6807ab1dd778591c062) 2021-12-20 00:03:21 -08:00
f0e58f4f9b 2021-12-19 nightly release (fb34af1b211d0b0ed7c2fcc63599215c8fb107c2) 2021-12-19 00:02:54 -08:00
ff227a22b7 2021-12-18 nightly release (9ee3006d583e9853da79f7bd12431c67a863519a) 2021-12-18 00:02:27 -08:00
bb1f3fce83 2021-12-17 nightly release (4a6a5d163012a81268f0db20e9fcc3a926dd4d54) 2021-12-17 00:02:23 -08:00
54d8b577b7 2021-12-16 nightly release (3d358a76783acef87f282cc0fb20818eeb4c133c) 2021-12-16 00:02:38 -08:00
1446d40fb4 2021-12-15 nightly release (3d358a76783acef87f282cc0fb20818eeb4c133c) 2021-12-15 00:02:58 -08:00
785dab1d01 2021-12-14 nightly release (3d358a76783acef87f282cc0fb20818eeb4c133c) 2021-12-14 00:02:31 -08:00
95346adead 2021-12-13 nightly release (e948856ce7ead632483e35714a9a27ea2fa68ee7) 2021-12-13 00:05:52 -08:00
18dc1aa4a1 2021-12-12 nightly release (e948856ce7ead632483e35714a9a27ea2fa68ee7) 2021-12-12 00:02:38 -08:00
0e83eb8b88 2021-12-11 nightly release (e948856ce7ead632483e35714a9a27ea2fa68ee7) 2021-12-11 00:03:25 -08:00
bfd8a5fd5f 2021-12-10 nightly release (e948856ce7ead632483e35714a9a27ea2fa68ee7) 2021-12-10 00:02:19 -08:00
fc105afbd8 2021-12-09 nightly release (8b20dde93240642b3fce14b304e2d5e6d09d9891) 2021-12-09 00:02:36 -08:00
98fc2d6d95 2021-12-08 nightly release (e06af7913692641dbcad1cf4d7931f32b8b4c2a7) 2021-12-08 09:28:55 -08:00
4c5efdfc23 2021-12-08 nightly release (bd8a4a937216433b281a333905175214a12f5fc1) 2021-12-08 00:02:35 -08:00
f12c9eb1a0 2021-12-07 nightly release (38c576cfef0473e2d1921e83ed52a2b4364cdede) 2021-12-07 00:04:14 -08:00
7ae7bea489 2021-12-06 nightly release (6a4fa860262951ca14db0c53fc7d60c3dbc48af4) 2021-12-06 00:02:59 -08:00
ea056f797f 2021-12-05 nightly release (6a4fa860262951ca14db0c53fc7d60c3dbc48af4) 2021-12-05 00:07:22 -08:00
6c54275622 2021-12-04 nightly release (6a4fa860262951ca14db0c53fc7d60c3dbc48af4) 2021-12-04 00:04:13 -08:00
ef07820bab 2021-12-03 nightly release (abda069ce203bbe9b655a939a58a7fedca86af80) 2021-12-03 00:03:58 -08:00
afbb6e88ab 2021-12-02 nightly release (cbe0a38d8c99e5c860d61818b2452ee3efa6d22f) 2021-12-02 00:02:27 -08:00
4b0e1e1c0a 2021-12-01 nightly release (e534c5efd7e7a0ed476fa1e9ec2f37ccd3a1b63d) 2021-12-01 00:02:40 -08:00
379f835b6c 2021-11-30 nightly release (1342f19a8c2d92ed86bc32a0b3a6fc9b35b3e3e3) 2021-11-30 00:06:12 -08:00
9212bb98d6 2021-11-29 nightly release (6ae34ea6f8b65d2ad36e417c3d525507901cfd7f) 2021-11-29 00:06:32 -08:00
1b6bb4f7b0 2021-11-28 nightly release (b10929a14a32c90ac5943cc3f27a3a9105e6f53f) 2021-11-28 00:10:41 -08:00
13468ed9ea 2021-11-27 nightly release (cffad597ea620600472c509b642a9ad03a56fdbe) 2021-11-27 00:05:38 -08:00
f547c5c960 2021-11-26 nightly release (5fdcc20d8d96a6b42387f57c2ce331516ad94228) 2021-11-26 00:07:52 -08:00
dfa8a087e3 2021-11-25 nightly release (5fdcc20d8d96a6b42387f57c2ce331516ad94228) 2021-11-25 00:04:09 -08:00
84395832d9 2021-11-24 nightly release (b5b62b340891f041b378681577c74922d21700a9) 2021-11-24 00:04:25 -08:00
e42361b602 2021-11-23 nightly release (d6a68e0b8dfe314da642e17e84b1f4cb095d00ba) 2021-11-23 00:07:38 -08:00
7cdf1ed9bd 2021-11-22 nightly release (e1c449ff342ee00a4b25e1651bd08a41b7e9fda4) 2021-11-22 00:03:38 -08:00
95589e9ba3 2021-11-21 nightly release (c7ecf1498d961415006c3710ac8d99166fe5d634) 2021-11-21 00:07:57 -08:00
cf31235b62 2021-11-20 nightly release (75955e4ef8a941d72db20c5098371325bd83ffd1) 2021-11-20 00:05:00 -08:00
15077954a3 2021-11-19 nightly release (ca921117589dfd8eca7c18bb2c31350dd26bbd45) 2021-11-19 00:02:50 -08:00
40631bb9cd 2021-11-18 nightly release (980778713588be8a583557c4693634c3782b00c4) 2021-11-18 00:02:53 -08:00
4eb447d938 2021-11-17 nightly release (aa9ee8d02aecee1a01f7e4fe22fd0660851227e8) 2021-11-17 00:06:20 -08:00
bfc110e848 2021-11-16 nightly release (722af775c3ff01c4af4f3fe79bb12997c8ab665a) 2021-11-16 00:05:37 -08:00
649bf082f6 2021-11-15 nightly release (1adeeabdc0c8832420c091c5c668843768530d7f) 2021-11-15 00:03:03 -08:00
228a46daa1 2021-11-14 nightly release (1adeeabdc0c8832420c091c5c668843768530d7f) 2021-11-14 00:06:12 -08:00
39c478062a 2021-11-13 nightly release (80339e85c5fea7c1f3ef1dd63ce67c049691194f) 2021-11-13 00:03:34 -08:00
96a56d12b5 2021-11-12 nightly release (613c1aca6dcc5c9764d81ddfb1e33f7b7b06bd5f) 2021-11-12 00:02:50 -08:00
65f638ab6c 2021-11-11 nightly release (1f07efd0f21e8750967c907c3f96b5344d3ab8c0) 2021-11-11 00:02:47 -08:00
4670c0042a 2021-11-10 nightly release (6b44e75f6bccca7acc8ec31a635f1175c265ac54) 2021-11-10 00:05:00 -08:00
ce5eb6a07a 2021-11-09 nightly release (4b1d044498f49ad5f770354174070a4277279c2d) 2021-11-09 00:02:29 -08:00
05a2aa8a7f 2021-11-08 nightly release (191b48b12f33e1e9525882da0c62b68686d69e42) 2021-11-08 00:04:23 -08:00
2512eac760 2021-11-07 nightly release (9fb3ba9d7bc5f6f8852a2bafe053b16a26d73e51) 2021-11-07 00:02:41 -07:00
9f25d95f9b 2021-11-06 nightly release (ae501a9727c16b8bc83c2fd7b40d9d801133ac91) 2021-11-06 00:04:47 -07:00
458c445463 2021-11-05 nightly release (f1754319e32f302a53520ac3608addb31402209b) 2021-11-05 00:03:07 -07:00
f14f488fd2 2021-11-04 nightly release (2486061c72c12ac7eb53946448f0a29525bf8cd1) 2021-11-04 00:09:46 -07:00
755da00eda 2021-11-03 nightly release (54241a9cfa1d4c5ee2096cd9acec1ae7bf623c37) 2021-11-03 00:07:22 -07:00
c81e8a07b2 2021-11-02 nightly release (c65f332da47eb9bc76aefc50122cae2630fff2cc) 2021-11-02 00:12:01 -07:00
c0f53f117a 2021-11-01 nightly release (97f29bda59deab8c063cf01f0a8ff4321b93c55e) 2021-11-01 00:11:29 -07:00
79922746b0 2021-10-31 nightly release (510e3026a9bdac6e347e8756c7750029201ecc82) 2021-10-31 00:03:48 -07:00
1a13e431d3 2021-10-30 nightly release (c00806beda43063a0978a81aa4e6ce42cb893fe0) 2021-10-30 00:04:21 -07:00
56587756dc 2021-10-29 nightly release (26241994b26df626ca81ec7c666b54bea90e2dd8) 2021-10-29 00:06:31 -07:00
729c0ef82e 2021-10-28 nightly release (fae1c0a4340ef4698f43875f5ed84ba512bdcd95) 2021-10-28 00:06:50 -07:00
4a04d2b0fe 2021-10-27 nightly release (c88da701e2f94987ea58a0980cc19cfb956cd980) 2021-10-27 00:02:48 -07:00
85a7535c03 2021-10-26 nightly release (0e371e413dd12b93cf50ad2441652ee6e0eb7217) 2021-10-26 00:11:04 -07:00
52fec21b2f 2021-10-25 nightly release (83f70db95cf9f53a98fae63d511b5013279a9ac2) 2021-10-25 00:13:15 -07:00
ef0fca4249 2021-10-24 nightly release (83f70db95cf9f53a98fae63d511b5013279a9ac2) 2021-10-24 00:03:57 -07:00
db84da55b7 2021-10-23 nightly release (83f70db95cf9f53a98fae63d511b5013279a9ac2) 2021-10-23 00:03:23 -07:00
5c9ba2feff 2021-10-22 nightly release (9d4549295d8da634771d6040c224d21928805be7) 2021-10-22 00:04:51 -07:00
15fecc4c83 2021-10-21 nightly release (f8f04d5424f9ea3068bcd16d1c799f161944936f) 2021-10-21 00:02:52 -07:00
03975588d7 2021-10-20 nightly release (32e3003726070597da9c6237e59c8ba09744cfe2) 2021-10-20 00:06:35 -07:00
c186c995df 2021-10-19 nightly release (0d203a16fed340488be0966d377e8a6f7f8f4244) 2021-10-19 00:04:57 -07:00
15ef6a1052 2021-10-18 nightly release (49a1d7bfcb9b0fc5f0ee62d91f5d6c76e4cf9dc8) 2021-10-18 00:06:57 -07:00
cb85f701d4 2021-10-17 nightly release (49a1d7bfcb9b0fc5f0ee62d91f5d6c76e4cf9dc8) 2021-10-17 00:08:45 -07:00
1392b8260e 2021-10-16 nightly release (49a1d7bfcb9b0fc5f0ee62d91f5d6c76e4cf9dc8) 2021-10-16 00:05:17 -07:00
8640112dea 2021-10-15 nightly release (49a1d7bfcb9b0fc5f0ee62d91f5d6c76e4cf9dc8) 2021-10-15 00:04:15 -07:00
d1b988e47c 2021-10-14 nightly release (6634570aef94b34dfc3a40865c9ba057356e6a47) 2021-10-14 00:04:21 -07:00
da7bfd9bcc 2021-10-13 nightly release (06a156efc75f726ca95cadc043bab0c0d75d7548) 2021-10-13 00:04:56 -07:00
e5dba35501 2021-10-12 nightly release (99711133403eff8474af0e710a45d367f4fb5e66) 2021-10-12 00:17:43 -07:00
90bf35d772 2021-10-11 nightly release (99711133403eff8474af0e710a45d367f4fb5e66) 2021-10-11 00:03:33 -07:00
29fe93c375 2021-10-10 nightly release (b96c7aea73705a94e584084fa410a7392cb7523f) 2021-10-10 00:04:45 -07:00
654e2c11ec 2021-10-09 nightly release (85b562dd2bd0a0738159746ce83bf110effcc065) 2021-10-09 00:05:22 -07:00
cba9960d6a 2021-10-08 nightly release (ca363d1e22f56722855d2c937eeb508784ad4e33) 2021-10-08 00:07:12 -07:00
a1937d7c45 2021-10-07 nightly release (67970e8c9b326e8e5d6e30ce652ec965f3599f3f) 2021-10-07 00:15:48 -07:00
6102554be1 2021-10-06 nightly release (3bd26792c06a4537883537997078de9d4a73e48d) 2021-10-06 00:08:48 -07:00
a21f2ab401 2021-10-05 nightly release (6eb3a1c8310253c6875d1d703f3cd00cdd9b00f1) 2021-10-05 00:04:15 -07:00
125a559b5c 2021-10-04 nightly release (89ed9bdaee468195faed1eb230960491eb350304) 2021-10-04 00:03:29 -07:00
a7b6201b1e 2021-10-03 nightly release (29c0725e8a0de91e1a4338dec785d1638aa02931) 2021-10-03 00:05:49 -07:00
2cead16bcb 2021-10-02 nightly release (10f62942818281eb99e0bef5f1afcc71c66599a7) 2021-10-02 00:12:13 -07:00
03686b218d 2021-10-01 nightly release (8297a16cc07fa88799fd221cca8d5efa1b71fe5f) 2021-10-01 00:15:23 -07:00
6a58458563 2021-09-30 nightly release (3900509b7da866c22e08120e73a0b54cc30e4de7) 2021-09-30 00:06:50 -07:00
13ee5d4f98 2021-09-29 nightly release (93852bb2d41d90b6ac660015d79f7474bcebb774) 2021-09-29 00:02:57 -07:00
01e3a7f7e7 2021-09-28 nightly release (63bb7c6dbab5644c6f95ccfb66de00db40fa6340) 2021-09-28 00:04:10 -07:00
1a7aea8cb1 2021-09-27 nightly release (146817c9d07ecc6f3af8111f31e37a8c64948cc0) 2021-09-27 00:05:24 -07:00
55eac26522 2021-09-26 nightly release (146817c9d07ecc6f3af8111f31e37a8c64948cc0) 2021-09-26 00:07:02 -07:00
eb64e52a57 2021-09-25 nightly release (af7238f2149fd23b87dbec0b5e0a2e5a82e7f0d1) 2021-09-25 00:03:57 -07:00
af95408d21 2021-09-24 nightly release (d78b3909e83fe6b8f3629bd7013528edbd7edc26) 2021-09-24 00:04:48 -07:00
42cf661a25 2021-09-23 nightly release (0ca1102609e9d2b06bad6965b695c0d76d8ffe46) 2021-09-23 00:05:23 -07:00
701771a879 2021-09-22 nightly release (3f5f721ab34e8b540bba47efbb9fc93d1bac0921) 2021-09-22 00:04:12 -07:00
6cf311ca6d 2021-09-21 nightly release (0dc98728bc06e1e0a553b8ad4ed2591b688cc72f) 2021-09-21 00:06:56 -07:00
5b07b6ad2f 2021-09-20 nightly release (88032d894311e5c0aed8bbc21a4306bc6be4af82) 2021-09-20 00:03:29 -07:00
87df24cd30 2021-09-19 nightly release (88032d894311e5c0aed8bbc21a4306bc6be4af82) 2021-09-19 00:03:22 -07:00
bd05cbf977 2021-09-18 nightly release (6707dfeefb00512eba1d95cb91306671638dc4d5) 2021-09-18 00:03:25 -07:00
64fd318420 2021-09-17 nightly release (8c38d141df429459ea6891847950ce157ac82b2c) 2021-09-17 00:04:16 -07:00
20ba201201 2021-09-16 nightly release (2bb898e039d90d3897cc0fbd1886f2b2fe4dbcfb) 2021-09-16 00:03:54 -07:00
64187fc690 2021-09-15 nightly release (6ab97fbc287ea89ce81abcb24829b502aeb309cc) 2021-09-15 00:03:13 -07:00
99da1f699d 2021-09-14 nightly release (c4073af61d4e530f8627dced870f42526d93dcaf) 2021-09-14 00:06:38 -07:00
84a9f5031f 2021-09-13 nightly release (d46ea03871562d4c93a9dfedcb211051338b44a9) 2021-09-13 00:09:58 -07:00
58df55da4d 2021-09-12 nightly release (dd2d48df07d6b9354f3e448bf66655941a22d1e1) 2021-09-12 00:04:31 -07:00
6862deee75 2021-09-11 nightly release (ade4bf3e82ea369e77d511a796ba936ce281aa89) 2021-09-11 00:02:27 -07:00
5286f993ad 2021-09-10 nightly release (11ef68938c4dda67efb9944ec4f3521776e0f9bc) 2021-09-10 00:05:13 -07:00
c99e75cc1e 2021-09-09 nightly release (15532595209d2daf34d35e10f8d3d3b64966aea2) 2021-09-09 00:09:14 -07:00
58c529752a 2021-09-08 nightly release (1a1fb31cfa3135e56c533da037b5d8dc6981b7fa) 2021-09-08 00:09:21 -07:00
277d05ac57 2021-09-07 nightly release (0c4e4e588e2e3308c659f741e7bc5cabf0975c09) 2021-09-07 00:02:23 -07:00
b7e88bfb19 2021-09-06 nightly release (0c4e4e588e2e3308c659f741e7bc5cabf0975c09) 2021-09-06 00:09:57 -07:00
d5e8254a4a 2021-09-05 nightly release (0c4e4e588e2e3308c659f741e7bc5cabf0975c09) 2021-09-05 00:04:55 -07:00
42fac310f6 2021-09-04 nightly release (0c4e4e588e2e3308c659f741e7bc5cabf0975c09) 2021-09-04 00:04:19 -07:00
7ee25ea72d 2021-09-03 nightly release (0c4e4e588e2e3308c659f741e7bc5cabf0975c09) 2021-09-03 00:09:38 -07:00
c9eff59b96 2021-09-02 nightly release (0c4e4e588e2e3308c659f741e7bc5cabf0975c09) 2021-09-02 00:07:41 -07:00
9c414cc253 2021-09-01 nightly release (0c4e4e588e2e3308c659f741e7bc5cabf0975c09) 2021-09-01 10:47:03 -07:00
aab14ffd43 2021-09-01 nightly release (8406dba65af414f4ac9bb569ca8d70752611e4ba) 2021-09-01 00:05:36 -07:00
cca8c1fe99 2021-08-31 nightly release (8406dba65af414f4ac9bb569ca8d70752611e4ba) 2021-08-31 00:05:17 -07:00
47829fdabc 2021-08-30 nightly release (8406dba65af414f4ac9bb569ca8d70752611e4ba) 2021-08-30 00:03:14 -07:00
5ceddc29e7 2021-08-29 nightly release (8406dba65af414f4ac9bb569ca8d70752611e4ba) 2021-08-29 00:02:17 -07:00
fc8a7a5769 2021-08-28 nightly release (8406dba65af414f4ac9bb569ca8d70752611e4ba) 2021-08-28 00:04:29 -07:00
c9287bd4b7 2021-08-27 nightly release (510d2ece81c18ba3f9bbcbc509effe281a77206c) 2021-08-27 00:06:04 -07:00
3d4fed4dc0 2021-08-26 nightly release (f4bc28990f6edcaf6bfc9e9737a70ea0be514198) 2021-08-26 00:03:25 -07:00
ece2e093bc 2021-08-25 nightly release (8dda299d9631e0f6e121dcb9f8f94bbdd8435515) 2021-08-25 00:11:42 -07:00
0d14ad556e 2021-08-24 nightly release (f5d585391d13287250e85cbd55a17c5e0b8ac2a8) 2021-08-24 00:11:28 -07:00
ca87998c97 2021-08-23 nightly release (d6133b2fe6b863dd49fb21641bd04e24e19ac794) 2021-08-23 00:02:58 -07:00
058c01d851 2021-08-22 nightly release (2289a12f21c54da93bf5d696e3f9aea83dd9c10d) 2021-08-22 00:03:02 -07:00
161d945f2d 2021-08-21 nightly release (a8de0d83fed2d68512c0b0e20716bd63e6769469) 2021-08-21 00:02:47 -07:00
38b78b9bfe 2021-08-20 nightly release (ccca66597ab7079c39b744c2906171aa63e7db61) 2021-08-20 00:13:47 -07:00
41b2532c21 2021-08-19 nightly release (2aa19f33c6272cb016c629792ddc70cf9636fc9a) 2021-08-19 00:02:54 -07:00
67788eefcd 2021-08-18 nightly release (cd5e9dcc1dd2d240aa034b1f02b9886b44b1efa6) 2021-08-18 00:03:53 -07:00
c21b6c1529 2021-08-17 nightly release (075024b9a34904ec3ecdab3704c3bcaa329bdfea) 2021-08-17 00:08:09 -07:00
100f6e1ee6 2021-08-16 nightly release (3c1d1170a4ca2e50a846c69cc61583a29952afee) 2021-08-16 00:03:21 -07:00
b27b86ad55 2021-08-15 nightly release (3c1d1170a4ca2e50a846c69cc61583a29952afee) 2021-08-15 00:07:23 -07:00
fa0a4e5a49 2021-08-14 nightly release (3c1d1170a4ca2e50a846c69cc61583a29952afee) 2021-08-14 00:09:34 -07:00
2cbeb755f3 2021-08-13 nightly release (3c1d1170a4ca2e50a846c69cc61583a29952afee) 2021-08-13 00:09:28 -07:00
c0abc5fe82 2021-08-12 nightly release (3c1d1170a4ca2e50a846c69cc61583a29952afee) 2021-08-12 00:02:42 -07:00
54912fec6b 2021-08-11 nightly release (3c1d1170a4ca2e50a846c69cc61583a29952afee) 2021-08-11 14:52:11 -07:00
bcd79e571e 2021-08-11 nightly release (3c1d1170a4ca2e50a846c69cc61583a29952afee) 2021-08-11 00:03:01 -07:00
56b302f2a7 2021-08-10 nightly release (3c1d1170a4ca2e50a846c69cc61583a29952afee) 2021-08-10 00:03:00 -07:00
269b2a58d4 2021-08-09 nightly release (30214aef2d831067b8962398321f1fb20d359c97) 2021-08-09 00:04:07 -07:00
fd62ee1fff 2021-08-08 nightly release (30214aef2d831067b8962398321f1fb20d359c97) 2021-08-08 00:04:31 -07:00
2b4586f493 2021-08-07 nightly release (4bd54cebe0b736acbcb8f040df897d17956bb71b) 2021-08-07 00:03:28 -07:00
f9574bca64 2021-08-06 nightly release (4bd54cebe0b736acbcb8f040df897d17956bb71b) 2021-08-06 00:04:35 -07:00
b9e662f0e4 2021-08-05 nightly release (4bd54cebe0b736acbcb8f040df897d17956bb71b) 2021-08-05 00:02:27 -07:00
726dc164e6 2021-08-04 nightly release (773a8eede437de972311b21824694e4fb120f6a2) 2021-08-04 00:07:56 -07:00
e2c5ce2ac5 2021-08-03 nightly release (10d3a2c13a1ae547ee5ece5838d66432158d4b22) 2021-08-03 00:02:25 -07:00
4acea95c07 2021-08-02 nightly release (7adb78017a81b656d22864ec97c072519a7dca63) 2021-08-02 00:03:19 -07:00
7e69745b3b 2021-08-01 nightly release (32b37ba2462d9d87337a4fe332f95524a4c49777) 2021-08-01 00:02:33 -07:00
19454af809 2021-07-31 nightly release (7521addede276bd978fc2b113a6f7d010273bdf1) 2021-07-31 00:02:30 -07:00
8b5dbd9749 2021-07-30 nightly release (8f519c5e07e9440a4beb519463302c0cdf2a4b37) 2021-07-30 00:07:42 -07:00
b24169897d 2021-07-29 nightly release (cac4aa71ca262dadd6a887bd1f449b3a35f03bf3) 2021-07-29 00:07:41 -07:00
145a20b1c8 2021-07-28 nightly release (df18d05429d8283ad7b848fa7fca0e06b7ec630b) 2021-07-28 00:03:32 -07:00
7fce0bbbd8 2021-07-27 nightly release (d2594fa5380954eba7d013044ebdde3c041b650d) 2021-07-27 00:02:30 -07:00
fe20ae42aa 2021-07-26 nightly release (8152433de2420fc64063366bebd7b1feadf38d1f) 2021-07-26 00:03:08 -07:00
68fa6f6579 2021-07-25 nightly release (8152433de2420fc64063366bebd7b1feadf38d1f) 2021-07-25 00:04:58 -07:00
cf42c96705 2021-07-24 nightly release (e856a45283c6a215c78597d05f6e8dfeb0030c49) 2021-07-24 00:03:01 -07:00
b324086eca 2021-07-23 nightly release (a9b0a921d592b328e7e80a436ef065dadda5f01b) 2021-07-23 00:04:42 -07:00
2bfbfd8285 2021-07-21 nightly release (94965212e5bf959b029e59d685162b1929bb4c31) 2021-07-21 00:02:20 -07:00
10d38a8b75 2021-07-19 nightly release (94965212e5bf959b029e59d685162b1929bb4c31) 2021-07-19 00:03:15 -07:00
590290b1d0 2021-07-18 nightly release (94965212e5bf959b029e59d685162b1929bb4c31) 2021-07-18 00:02:35 -07:00
d4adc41437 2021-07-17 nightly release (94965212e5bf959b029e59d685162b1929bb4c31) 2021-07-17 00:03:15 -07:00
604c318ee0 2021-07-16 nightly release (94965212e5bf959b029e59d685162b1929bb4c31) 2021-07-16 00:02:40 -07:00
d88bfc953f 2021-07-15 nightly release (94965212e5bf959b029e59d685162b1929bb4c31) 2021-07-15 00:02:11 -07:00
7a317743cf 2021-07-14 nightly release (60382de455ea5f860d5787fac72ea100b7a405bb) 2021-07-14 00:03:10 -07:00
98f637eb78 2021-07-13 nightly release (8a2c7d902fb159bb57a0cf46f3a5ba74095cc34a) 2021-07-13 00:02:45 -07:00
27cbbb870c 2021-07-12 nightly release (76c0f223d3ff430369a2864cf4e05464b1bfb688) 2021-07-12 00:03:04 -07:00
55e9cf2d89 2021-07-11 nightly release (76c0f223d3ff430369a2864cf4e05464b1bfb688) 2021-07-11 00:04:40 -07:00
abc594d345 2021-07-10 nightly release (76c0f223d3ff430369a2864cf4e05464b1bfb688) 2021-07-10 00:05:52 -07:00
3b69ec8047 2021-07-09 nightly release (80797d03e072d32c36fd26589f03368b1536001d) 2021-07-09 00:02:44 -07:00
fc79ae3d0e 2021-07-08 nightly release (c03f99f3ef2db0cd61f9bf3813492cd9fda9ce79) 2021-07-08 00:02:52 -07:00
273fb34c2f 2021-07-07 nightly release (5da507b57bee31805217889b9f8ff673915dbc3a) 2021-07-07 00:03:41 -07:00
b2bec4fed4 2021-07-06 nightly release (5576c7bdd1a21187ae4ff6bc6c584555d5470a43) 2021-07-06 00:02:14 -07:00
fb10eeee5b 2021-07-05 nightly release (5576c7bdd1a21187ae4ff6bc6c584555d5470a43) 2021-07-05 00:06:20 -07:00
8ab947ade6 2021-07-04 nightly release (5576c7bdd1a21187ae4ff6bc6c584555d5470a43) 2021-07-04 00:02:48 -07:00
17a841ec7b 2021-07-03 nightly release (5576c7bdd1a21187ae4ff6bc6c584555d5470a43) 2021-07-03 00:02:56 -07:00
b31c9ebcab 2021-07-02 nightly release (5576c7bdd1a21187ae4ff6bc6c584555d5470a43) 2021-07-02 00:02:43 -07:00
0487088661 2021-07-01 nightly release (5576c7bdd1a21187ae4ff6bc6c584555d5470a43) 2021-07-01 00:02:35 -07:00
f502a6f6b9 2021-06-30 nightly release (5bc28c897e5d00c0a839de9692b9b54de5294aff) 2021-06-30 00:02:26 -07:00
1f6616c269 2021-06-29 nightly release (d71e7ae740f635534594075fc3060e24c399d0dc) 2021-06-29 00:03:46 -07:00
d3c62eb854 2021-06-28 nightly release (3bfe15085d6a01b3accf75647d12c70084392a61) 2021-06-28 00:02:36 -07:00
be1a85d8c7 2021-06-27 nightly release (dae25c2002a0d7201588e6ff60316a8d9ace88a8) 2021-06-27 00:02:32 -07:00
f82186a4bb 2021-06-26 nightly release (ad69e2fd1120ac37f826c09ee2f1654f1d5c80b2) 2021-06-26 00:02:35 -07:00
9f626092c9 2021-06-25 nightly release (2dedd96dd259a37f7a812856b43de29a14381aee) 2021-06-25 00:04:26 -07:00
1221896926 2021-06-24 nightly release (5bd49c3396e7dd0380b0ee8633a6ae7c3791e479) 2021-06-24 00:03:03 -07:00
61ec571d74 2021-06-23 nightly release (5bd49c3396e7dd0380b0ee8633a6ae7c3791e479) 2021-06-23 00:02:34 -07:00
360e0ec73e 2021-06-22 nightly release (5bd49c3396e7dd0380b0ee8633a6ae7c3791e479) 2021-06-22 00:02:33 -07:00
143c23e586 2021-06-21 nightly release (3815a013ed76e2a0a0b15fbcd0d419d9be322254) 2021-06-21 00:03:00 -07:00
d65834d579 2021-06-20 nightly release (3815a013ed76e2a0a0b15fbcd0d419d9be322254) 2021-06-20 00:05:17 -07:00
932d09235e 2021-06-19 nightly release (3815a013ed76e2a0a0b15fbcd0d419d9be322254) 2021-06-19 00:03:50 -07:00
eed1219e4b 2021-06-18 nightly release (be038d89898d0d2111b8acedefd08ceed62664cb) 2021-06-18 00:02:08 -07:00
7be6ef5eeb 2021-06-17 nightly release (be038d89898d0d2111b8acedefd08ceed62664cb) 2021-06-17 00:02:32 -07:00
d7ab6005b9 2021-06-16 nightly release (be038d89898d0d2111b8acedefd08ceed62664cb) 2021-06-16 00:03:01 -07:00
f8fb474ad9 2021-06-15 nightly release (1fc3576d97b5c551ea1add6e81da530070f2e03f) 2021-06-15 00:03:05 -07:00
c742b6c9c3 2021-06-14 nightly release (1fc3576d97b5c551ea1add6e81da530070f2e03f) 2021-06-14 00:02:18 -07:00
cb3b64cb01 2021-06-13 nightly release (1fc3576d97b5c551ea1add6e81da530070f2e03f) 2021-06-13 00:02:19 -07:00
54dc5817a9 2021-06-12 nightly release (1fc3576d97b5c551ea1add6e81da530070f2e03f) 2021-06-12 00:03:35 -07:00
5dc15963c2 2021-06-11 nightly release (1fc3576d97b5c551ea1add6e81da530070f2e03f) 2021-06-11 00:02:07 -07:00
ddaf6a0647 2021-06-10 nightly release (2f395f3b5466b72638847df37f5b3948ae4c5948) 2021-06-10 00:02:43 -07:00
210259b208 2021-06-09 nightly release (77dde35f1a55aaebc7ad3df53edb6796a1183a24) 2021-06-09 00:02:29 -07:00
89e6f30509 2021-06-08 nightly release (77dde35f1a55aaebc7ad3df53edb6796a1183a24) 2021-06-08 00:02:40 -07:00
f7bfc7bb4d 2021-06-07 nightly release (de40c8e495ffc7b4042ac544ba38759c4dcb5d50) 2021-06-07 00:02:39 -07:00
216f8135a8 2021-06-06 nightly release (0a5bfa99198a1c07e4eeec35dd8c20082a05f63a) 2021-06-06 00:04:49 -07:00
93404aa88f 2021-06-05 nightly release (c769300301e8bc775e54cdd114e2beaa11b1f893) 2021-06-05 00:02:17 -07:00
ba9b756a30 2021-06-04 nightly release (f87aa23125ff92520204fa0ca201c2d169fd3e17) 2021-06-04 00:02:51 -07:00
abca349be8 2021-06-03 nightly release (f87aa23125ff92520204fa0ca201c2d169fd3e17) 2021-06-03 00:07:09 -07:00
789de8268a 2021-06-02 nightly release (1d37f415671976c36f6bf3fa4b83384db066fc2f) 2021-06-02 00:03:00 -07:00
2916b4e5e9 2021-06-01 nightly release (223725cfb08fc6915597d1e16f11d7c51c3c2934) 2021-06-01 00:02:16 -07:00
d13f843261 2021-05-31 nightly release (ea465f73783a130b7a21200a2ec2fa01c885a63b) 2021-05-31 00:02:32 -07:00
be3599d1b9 2021-05-30 nightly release (d68df54269552dc8312b7c2eba23be20c255c59f) 2021-05-30 00:02:14 -07:00
9b497f13f4 2021-05-29 nightly release (c9af4c2636c7ca27be15f39f16512c2f5f36b7fe) 2021-05-29 00:02:07 -07:00
8149145c71 2021-05-28 nightly release (934f6dca65a7521aec7a13a1156b21551196bba0) 2021-05-28 00:02:43 -07:00
fa0e02b7cf 2021-05-27 nightly release (58d1b3639bc07f9519de18e5a18e575f260c7eeb) 2021-05-27 00:02:34 -07:00
b530f7c66b 2021-05-26 nightly release (cf395c071820e4d98a960bc761524b655d262bb3) 2021-05-26 00:04:03 -07:00
6b0e363bd3 2021-05-25 nightly release (f29e75c4dcc3454d50849f3a4670c6ae1d4d3142) 2021-05-25 00:03:02 -07:00
c375b9af1f 2021-05-24 nightly release (e8c6a65074aa039b749edab2b968a4885550dbde) 2021-05-24 00:03:12 -07:00
8c318c5cdd 2021-05-23 nightly release (e8c6a65074aa039b749edab2b968a4885550dbde) 2021-05-23 00:03:29 -07:00
f8f3346997 2021-05-22 nightly release (e8c6a65074aa039b749edab2b968a4885550dbde) 2021-05-22 00:04:14 -07:00
f3a9779b78 2021-05-21 nightly release (e8c6a65074aa039b749edab2b968a4885550dbde) 2021-05-21 00:02:16 -07:00
94e33c13cd 2021-05-20 nightly release (00a46a5eb46073807ec3d28dfd83bf07b206530f) 2021-05-20 00:02:33 -07:00
c5de2f0f2c 2021-05-19 nightly release (00a46a5eb46073807ec3d28dfd83bf07b206530f) 2021-05-19 00:10:34 -07:00
19537812bc 2021-05-18 nightly release (00a46a5eb46073807ec3d28dfd83bf07b206530f) 2021-05-18 00:02:24 -07:00
fc348d8ad2 2021-05-17 nightly release (00a46a5eb46073807ec3d28dfd83bf07b206530f) 2021-05-17 00:02:44 -07:00
7d615e312d 2021-05-16 nightly release (00a46a5eb46073807ec3d28dfd83bf07b206530f) 2021-05-16 00:02:20 -07:00
537fc9b889 2021-05-15 nightly release (00a46a5eb46073807ec3d28dfd83bf07b206530f) 2021-05-15 00:04:07 -07:00
4fb06c9e93 2021-05-14 nightly release (9b95568dc3a81de60435ad155ec87f625f4b69a2) 2021-05-14 00:02:23 -07:00
40b7867cb5 2021-05-13 nightly release (d212bf186390fdcc3b36cd3b41489544c4f50b75) 2021-05-13 00:02:23 -07:00
ef9586b10b 2021-05-12 nightly release (0d4dc6cb39184c2a6ef48cc5238f4df60ec9f14d) 2021-05-12 00:03:01 -07:00
bde9f3aa8c 2021-05-11 nightly release (f97650e70bb8ef42680a700e8c14a288aa3ea661) 2021-05-11 00:02:57 -07:00
9584d83c36 2021-05-10 nightly release (fc55290e5b128d82fa3854565a4d89f60d747fa7) 2021-05-10 00:02:29 -07:00
515bd352fa 2021-05-09 nightly release (e8fb167b1732b2e0ed7dde05f5c59e9a34997a00) 2021-05-09 00:05:09 -07:00
6e6cc22e9a 2021-05-08 nightly release (8c04593c0a486bea7e2cbec348298d348742e096) 2021-05-08 00:02:10 -07:00
c9944ca13c 2021-05-07 nightly release (e5e095cbe4dbc5a601f98e6134dcbd59c6342d7d) 2021-05-07 00:02:26 -07:00
b64472a49d 2021-05-06 nightly release (7627dd568aeda29b2cd757f11493d8a1f0599b60) 2021-05-06 00:11:15 -07:00
513f2a2ada 2021-05-05 nightly release (9c5478588e68a104024dcdaae61c9b402390631c) 2021-05-05 00:02:38 -07:00
a465a9317b 2021-05-04 nightly release (c0309af1f3d57e362a00308c48bca8a5ac2ecb38) 2021-05-04 00:03:00 -07:00
2bd4eadf47 2021-05-03 nightly release (5c68072ee866dde690bc3dda7c2b4bf1a1e6f485) 2021-05-03 00:02:14 -07:00
4ee630a813 2021-05-02 nightly release (afe6b4c8ee7c494c259bbcf8585ffb30a416d924) 2021-05-02 00:02:39 -07:00
adfc9a352b 2021-05-01 nightly release (5e422fa1707f637a7eb8ea9207943d729e13cbda) 2021-05-01 00:02:03 -07:00
1bb0a274a0 2021-04-30 nightly release (65968ab817db323a532f50a2f2ea131ae27dada5) 2021-04-30 00:02:42 -07:00
434be444db 2021-04-29 nightly release (73453f1de112609a34461c84bb16faf74b8b40a4) 2021-04-29 00:03:04 -07:00
6ea01360d9 2021-04-28 nightly release (808850b6de8f92afde0ddf5dce6b94523b126b51) 2021-04-28 00:04:10 -07:00
0fd1dbcff0 2021-04-27 nightly release (7989f2ac879fcc624fc7cc055f437fa849cda5f2) 2021-04-27 00:02:25 -07:00
fb65a2e2ff 2021-04-26 nightly release (70d9be0f42e569d6c74091bbed89ac2d9933ee65) 2021-04-26 00:02:28 -07:00
bc2e5ae70f 2021-04-25 nightly release (bcef7ebd603a6e16a11ab20a030a4338dbb91ff6) 2021-04-25 00:03:07 -07:00
2322ce0268 2021-04-24 nightly release (19943aafe90558a209857a188191df161fd32a6f) 2021-04-24 00:02:35 -07:00
b881571665 2021-04-23 nightly release (19943aafe90558a209857a188191df161fd32a6f) 2021-04-23 00:04:30 -07:00
c7974927b6 2021-04-22 nightly release (19943aafe90558a209857a188191df161fd32a6f) 2021-04-22 00:02:28 -07:00
aca2009f6b 2021-04-21 nightly release (1e03a2505f9cee92587bf45fbbbbfedede5cb9ec) 2021-04-21 00:01:48 -07:00
24ee5bc666 2021-04-20 nightly release (ca6e5c7fc9244ace874bb99aacb12eddef5d4f8a) 2021-04-20 00:02:03 -07:00
6b4502817d 2021-04-19 nightly release (3e42da09dfac51a260a01acd65953e497d027059) 2021-04-19 00:02:20 -07:00
cffbd8af00 2021-04-18 nightly release (85126629a59ac51dcebca08b0ef2da5dd3afd827) 2021-04-18 00:01:53 -07:00
8ed25026ce 2021-04-17 nightly release (98933866a984d104e9f2bf1a91f3a38d3cbe0b99) 2021-04-17 00:01:58 -07:00
10ff790778 2021-04-16 nightly release (bde53cfd9ad3992b21ff30120018017252e8a643) 2021-04-16 00:02:34 -07:00
afff696fee 2021-04-15 nightly release (1688a5d31a740fe502232295ec22e49d376e4f2d) 2021-04-15 00:02:05 -07:00
12d5971c08 2021-04-14 nightly release (19f15317a0c2153aa9ef911e84b939122c9b0769) 2021-04-14 00:02:10 -07:00
a697df1413 2021-04-13 nightly release (19f15317a0c2153aa9ef911e84b939122c9b0769) 2021-04-13 00:02:04 -07:00
e5450b3cc8 2021-04-12 nightly release (19f15317a0c2153aa9ef911e84b939122c9b0769) 2021-04-12 00:01:50 -07:00
3e479451cd 2021-04-11 nightly release (c9b94a85e9029f994f9bd72409293723cbc1d7b1) 2021-04-11 00:01:54 -07:00
552e7d3bd3 2021-04-10 nightly release (7485818a3ff69775b7a01b2d4d971df441702605) 2021-04-10 00:02:17 -07:00
26116b3581 2021-04-09 nightly release (f1a0b817f063250d76aa3bddba6d0d5772a77bf3) 2021-04-09 00:01:49 -07:00
b785f7a276 2021-04-08 nightly release (5f90ed550ccffbe02476a9229ecddc3dc89b3d50) 2021-04-08 00:02:36 -07:00
167a6be779 2021-04-07 nightly release (96655e2b81765d94c359a5dc079ec00404244184) 2021-04-07 00:01:53 -07:00
435d2577d1 2021-04-06 nightly release (b986a76d91906f3e22d6963b16f56e2f8e813a54) 2021-04-06 00:01:53 -07:00
0226b9c72e 2021-04-05 nightly release (6e33420436412d3704d181ebb7fe3f39ba00a087) 2021-04-05 00:02:01 -07:00
a35e8f72a0 2021-04-04 nightly release (6e33420436412d3704d181ebb7fe3f39ba00a087) 2021-04-04 00:01:51 -07:00
33aed5f7bf 2021-04-03 nightly release (6e33420436412d3704d181ebb7fe3f39ba00a087) 2021-04-03 00:01:56 -07:00
729f49d35a 2021-04-02 nightly release (047a487b072354325d50db1510e75ec4218b1a75) 2021-04-02 00:01:54 -07:00
732df435ba 2021-04-01 nightly release (eb52e3646035c8916495515399c2c7ccf490c705) 2021-04-01 00:02:56 -07:00
12be6b82da 2021-03-31 nightly release (920eb01e2e066f6e51f20d0e24a02472e53bc723) 2021-03-31 00:02:04 -07:00
0f545f9a7b 2021-03-30 nightly release (1267efce75e20e571995694c6fbcf4934119395b) 2021-03-30 00:01:49 -07:00
f6458977ba 2021-03-29 nightly release (b7c5d575638afeb7b61594782af438658f6234cc) 2021-03-29 00:02:02 -07:00
3f1f3bf563 2021-03-28 nightly release (7caa464631c1fcc9f9a4290cf0fe7913c36d1796) 2021-03-28 00:01:54 -07:00
6f95b09bb8 2021-03-27 nightly release (394b720e38455c2c07e6a31ef816edd0c5afbeb4) 2021-03-27 00:01:48 -07:00
cae5d728c0 2021-03-26 nightly release (fd58ececab731803eceace70b985b35f9872b53c) 2021-03-26 00:01:52 -07:00
755dd076a3 2021-03-25 nightly release (347ab5d8b86a69c4ab0a163b01d41cd0d0734f59) 2021-03-25 00:01:57 -07:00
13fb17b712 2021-03-24 nightly release (91d37d7d2fd2bb9bc9dcaa3ffd58a39426285af9) 2021-03-24 00:01:52 -07:00
09588132d7 2021-03-23 nightly release (6a4d2c61d5ea12d62bfcf0ad0b9cd79ae776433e) 2021-03-23 00:01:50 -07:00
6f3e9ce0af 2021-03-22 nightly release (6a4d2c61d5ea12d62bfcf0ad0b9cd79ae776433e) 2021-03-22 00:02:07 -07:00
82c758067f 2021-03-21 nightly release (6a4d2c61d5ea12d62bfcf0ad0b9cd79ae776433e) 2021-03-21 00:03:10 -07:00
c3626319c3 2021-03-20 nightly release (454dd7ba8647ac11735c6563ac8e2b60313789ad) 2021-03-20 00:02:11 -07:00
638cdc776a 2021-03-19 nightly release (19792b45dbf30b4555c4a87512e624cdd4aa6e4c) 2021-03-19 00:01:57 -07:00
1bbf0359e8 2021-03-18 nightly release (2e7311ef25a028f06bfff7cf97e6fbf5da767eb5) 2021-03-18 00:03:31 -07:00
b14b3f48b9 2021-03-17 nightly release (b936abd840d8fa3122d3a6166c0d7344d8352901) 2021-03-17 00:02:03 -07:00
3f71714146 2021-03-16 nightly release (e87ab2ac4d4f4d208a1dcbc21b584fe59bf18e5f) 2021-03-16 00:01:50 -07:00
f595ab38d4 2021-03-15 nightly release (f6df18f6ca444561b4108d746ed7b579a78c0294) 2021-03-15 00:02:10 -07:00
3ddc48dbe5 2021-03-14 nightly release (84af0c7acdac705e0465d4865cd37e6cfe2cd537) 2021-03-14 00:02:01 -08:00
49244d58a9 2021-03-13 nightly release (8a5b946ff6ecc068b11d5c40a571af8b8b24137e) 2021-03-13 00:01:57 -08:00
4ec6b36905 2021-03-12 nightly release (997f05cd34078c0d2447f9f310b1f5d6fd041503) 2021-03-12 00:01:53 -08:00
f8c286b72c 2021-03-11 nightly release (0584fd9339af7c939ab7d955db05743ba58ff86d) 2021-03-11 00:01:53 -08:00
30b9583650 2021-03-10 nightly release (a9e4bb56e54e83a69fbe7fad6d637817db1dd6a4) 2021-03-10 00:01:58 -08:00
56e7889e52 2021-03-09 nightly release (a947bfaa2650ea1f4c6e61e94c7e6d3eb3a3d952) 2021-03-09 00:02:00 -08:00
21d09eea0c 2021-03-08 nightly release (36dc5d3b3aaa81280ad290d4b85d4dd033c5c911) 2021-03-08 17:06:31 -08:00
8859a4060b 2021-03-05 nightly release (69bb0e028596d481cda38a0f49bcb5965fab4293) 2021-03-05 00:01:52 -08:00
39496d2d15 2021-03-04 nightly release (d98839e53eaa171230c6d190e51e2fded8fa8fe1) 2021-03-04 00:02:00 -08:00
cc88ac8bbd 2021-03-03 nightly release (30dd15e778195c6b7cf266a0b4b20af9c9ca5ba7) 2021-03-03 00:02:33 -08:00
715835c8ee 2021-03-02 nightly release (748285ccd76b67db7ebc0bc099789bb09c23c038) 2021-03-02 00:01:55 -08:00
70f88165ce 2021-03-01 nightly release (b9e12a0e8290af1c05107cf7e9a6117355e5ac1a) 2021-03-01 00:01:58 -08:00
ad67b30e18 2021-02-28 nightly release (f2f7fdba05159e90b75b780cef29a0640a1f475c) 2021-02-28 00:01:49 -08:00
ea85ae6460 2021-02-27 nightly release (177694681ecc582c5335adbbe43c1ee1e02d53ce) 2021-02-27 00:01:55 -08:00
7e0921bfc4 2021-02-26 nightly release (7a178a8a523d4653a3a2fa10c573b71e7fab1b9a) 2021-02-26 00:01:51 -08:00
edc5fdff12 2021-02-25 nightly release (57947c5d85cb2fc755773ebce2df06b1c7d177b0) 2021-02-25 00:04:07 -08:00
8b997aa6a3 2021-02-24 nightly release (b4b7db2f3bdd67a8650583a49b8a12b24c2c7a78) 2021-02-24 11:13:59 -08:00
c89db19de8 2021-02-23 nightly release (64847c7f0b3559c6edc40f001619b80c7dc68ef7) 2021-02-23 00:04:07 -08:00
3cbcda0694 2021-02-22 nightly release (c2b9283d4aa3f05999926b6871fc1c4bb787c15a) 2021-02-22 00:01:57 -08:00
ad403652be 2021-02-21 nightly release (a935118c90db94879b41d6dc359520e99e6cdca2) 2021-02-21 00:01:49 -08:00
ebd6d2d8c4 2021-02-20 nightly release (bb34fd6191e0f2a31edc38e00d93a24d9da8c031) 2021-02-20 00:01:50 -08:00
c316f4b69f 2021-02-19 nightly release (0c46b6b3f63b0858cb0d3e76ee43ccc2659143a6) 2021-02-19 00:02:18 -08:00
02ae2764a8 2021-02-18 nightly release (ac121165e29d29399615e4ea8fc286ab0864571d) 2021-02-18 00:01:55 -08:00
15f099ad3a 2021-02-17 nightly release (bb9e0c625ebe0e8dd73d896b8a52ff807d969fb3) 2021-02-17 00:01:51 -08:00
a9f93776d4 2021-02-16 nightly release (df837d0384398f1fac1000189d62b05e7ff0240d) 2021-02-16 00:04:22 -08:00
486e771762 2021-02-15 nightly release (1903b32c35a7e7432d62e43a2d3a679f6f01066d) 2021-02-15 00:01:55 -08:00
6c8fea96b3 2021-02-14 nightly release (4949eea0ffb60dc81a0a78402fa59fdf68206718) 2021-02-14 00:02:55 -08:00
08bda18b3f 2021-02-13 nightly release (71d0b5632bfa29f1b6828050edc6512402e253a7) 2021-02-13 00:03:08 -08:00
acd0a1de4c 2021-02-12 nightly release (e4203c4306d86a546d12f9fc354ea78794e2606e) 2021-02-12 00:02:09 -08:00
5c9184d4c4 2021-02-11 nightly release (fd41ed1ccef68e88e075c7b633409644c22f0782) 2021-02-11 00:01:59 -08:00
99bc386e8d 2021-02-10 nightly release (0620c96fd6a140e68c49d68ed14721b1ee108ecc) 2021-02-10 00:02:16 -08:00
567520f3a8 2021-02-09 nightly release (034a007ad80038ad796959387c5a450f7a05ac57) 2021-02-09 00:02:04 -08:00
71e7186e5d 2021-02-08 nightly release (fcf8b712348f21634044a5d76a69a59727756357) 2021-02-08 00:02:08 -08:00
3e934d77c2 2021-02-07 nightly release (0c313564af6173d54b393ae0b70cf2cb0db5a5c8) 2021-02-07 00:02:00 -08:00
256d46ca95 2021-02-06 nightly release (215d9daceb5482d2d5cceaaf44cb3f2aa38faaaf) 2021-02-06 00:02:23 -08:00
a8da1dd168 2021-02-05 nightly release (d3023d86ba6eebbcfeafc1633d1a8f8249d12e6d) 2021-02-05 00:02:07 -08:00
5fdff0d29f 2021-02-04 nightly release (1518aee639b6f9fb4456977431720bd5ecb2ed74) 2021-02-04 00:03:20 -08:00
4b6fea91c7 2021-02-03 nightly release (62f6e55439255d27d41752e2f01715c11e04bd38) 2021-02-03 00:02:01 -08:00
d8fdfb6912 2021-02-02 nightly release (7328710cbce459bbd77dfe424acc94010725d979) 2021-02-02 00:01:53 -08:00
59906439fe 2021-02-01 nightly release (40c0fffb4bb4a7d49cb50c6b276565aa60b3daaf) 2021-02-01 00:02:00 -08:00
fa29005e57 2021-01-31 nightly release (9cf62a4b5d3b287442e70c0c560a8e21d8c3b189) 2021-01-31 00:02:28 -08:00
f34c7916c7 2021-01-30 nightly release (e26fccc22b1f1fa28bceb076567d3e53c92a4c77) 2021-01-30 00:04:16 -08:00
ecc357c395 2021-01-29 nightly release (534aabce147129fc2fa54d448093c79603d45781) 2021-01-29 00:02:38 -08:00
470e6b3037 2021-01-28 nightly release (9fe7c0633f74b04889cce81cea2cbb28f8d08cea) 2021-01-28 00:02:15 -08:00
8d5685876a 2021-01-27 nightly release (19358808604129573dba354558ca4019acf2503f) 2021-01-27 00:02:13 -08:00
adf8b7eb1c 2021-01-26 nightly release (6dda0363bb47fbfbceb242098f63b67560540584) 2021-01-26 10:19:32 -08:00
e64ec0010a 2021-01-25 nightly release (48b6b9221a544f7d5b7d6f93eec2fa304da8af94) 2021-01-25 00:09:03 -08:00
49b090d628 2021-01-24 nightly release (1f40f2a1723fd3b9302bea3829f2570c8e0e7e94) 2021-01-24 00:03:43 -08:00
159eb0d08a 2021-01-23 nightly release (6aec1eba1582ef2c3486652ec0a6cd0ce346b83d) 2021-01-23 00:04:39 -08:00
c51f016767 2021-01-22 nightly release (7494f0233aabb5109e068f3cad73e20dc6a8dd23) 2021-01-22 00:02:55 -08:00
52f6bba29c 2021-01-21 nightly release (439afda090a343e90aa87678a0019c0dc26880cc) 2021-01-21 00:03:48 -08:00
99845753b9 2021-01-20 nightly release (e00966501b63013cd3057daf864cbb54e7f4cf9b) 2021-01-20 00:03:07 -08:00
eb7195e27a 2021-01-19 nightly release (5252e9857a1587a38ca09d316c958eb232e0f68d) 2021-01-19 00:06:10 -08:00
e6279f85c9 2021-01-18 nightly release (3f052ba07b2da4801d8f52f4116357760d5b632d) 2021-01-18 00:07:39 -08:00
43d4eaff8a 2021-01-17 nightly release (7e05d07ca7557b0b433b97a5943c8eedbbf34837) 2021-01-17 00:02:40 -08:00
cc54968f58 2021-01-16 nightly release (a469336292e411a056fbe9445ed948108acde57d) 2021-01-16 00:05:07 -08:00
c4efbf7862 2021-01-15 nightly release (d9f71b5868ca7e36e16e9e6a87ae39c738ccdc5f) 2021-01-15 00:04:29 -08:00
71094fd978 2021-01-14 nightly release (19a8e68d8c2c53f47f1dcfdbadec03f6897d84e6) 2021-01-14 00:04:55 -08:00
4c84d88295 2021-01-13 nightly release (b54240d200fe34fe425857a67a87273bc105665f) 2021-01-13 00:02:38 -08:00
538bf139cb 2021-01-12 nightly release (8c5b0247a571aa672f9070fcc9769f2c4bb19571) 2021-01-12 00:04:11 -08:00
efdef1838a 2021-01-11 nightly release (9d8bd216f9e29251685389a22dce69614ab648a5) 2021-01-11 00:02:43 -08:00
5f767b4f41 2021-01-10 nightly release (375c30a7177442fb9d6de7516a9ae4031ae324c4) 2021-01-10 00:03:26 -08:00
206064ae71 2021-01-09 nightly release (d4c1684cf5ddbbe808d71999d4fdf014c3fdb8b1) 2021-01-09 00:02:27 -08:00
bc83b4980f 2021-01-08 nightly release (c215ffb6a2de1c85a62a146a9e282908335ba3ed) 2021-01-08 00:04:16 -08:00
b7bb2d75a2 2021-01-07 nightly release (968ad47b410b93d2600d163db50eb9fb45c24a2b) 2021-01-07 00:04:09 -08:00
396c67bec6 2021-01-06 nightly release (ad7d208ba5f2c5614679a7999918b75ae74530e9) 2021-01-06 00:05:09 -08:00
21c7ada3a6 2021-01-05 nightly release (ee80b458433627ea8e4f48e7b50043dca1906bb1) 2021-01-05 00:03:00 -08:00
e2681f4d25 2021-01-04 nightly release (e44b2b72bd4ccecf9c2f6c18d09c11eff446b5a3) 2021-01-04 00:03:15 -08:00
4af891460c 2021-01-03 nightly release (8aad66a7bd35782a0e662d133276830204499656) 2021-01-03 00:02:30 -08:00
b788d5159e 2021-01-02 nightly release (8aad66a7bd35782a0e662d133276830204499656) 2021-01-02 00:03:13 -08:00
d7a4e5a328 2021-01-01 nightly release (749f8b78508c43f9e6331f2395a4202785068442) 2021-01-01 00:05:05 -08:00
afb63f9b63 2020-12-31 nightly release (749f8b78508c43f9e6331f2395a4202785068442) 2020-12-31 00:03:08 -08:00
8b6af7fd8f 2020-12-30 nightly release (12b73fdbbf4a89f3ec46983f90a2bb2d866cb338) 2020-12-30 00:02:59 -08:00
3c482552c8 2020-12-29 nightly release (c619892482667f846f96215f290df70d56bd4a84) 2020-12-29 00:03:24 -08:00
606ddb19c9 2020-12-28 nightly release (963f7629b591dc9750476faf1513bc7f1fb4d6de) 2020-12-28 00:05:11 -08:00
95d87beeb1 2020-12-27 nightly release (963f7629b591dc9750476faf1513bc7f1fb4d6de) 2020-12-27 00:04:51 -08:00
9b38aa2443 2020-12-26 nightly release (963f7629b591dc9750476faf1513bc7f1fb4d6de) 2020-12-26 00:02:40 -08:00
6d23d0fe51 2020-12-25 nightly release (963f7629b591dc9750476faf1513bc7f1fb4d6de) 2020-12-25 00:02:57 -08:00
8e7138bce1 2020-12-24 nightly release (89b4899ea5363fd69872c0cabf0dedea2dc533c8) 2020-12-24 00:03:20 -08:00
54837ec26d 2020-12-23 nightly release (55b431b17aba504ae7b75f6f97b4437101e50f38) 2020-12-23 00:04:02 -08:00
58d9290532 2020-12-22 nightly release (590e7168ed66bc1060a0bff6c35cc1eea86db0ae) 2020-12-22 00:02:56 -08:00
b8f60df5f5 2020-12-21 nightly release (5c3788d5d76f64f6708e0b79f40b1cf45276625a) 2020-12-21 00:02:54 -08:00
9ceeb84f52 2020-12-20 nightly release (c0deb231db76dbea8a9d326401417f7d1ce96ed5) 2020-12-20 00:02:40 -08:00
370cb23850 2020-12-19 nightly release (e0f60c97208a9745b4e031b0941ec60b75a7406b) 2020-12-19 00:02:41 -08:00
cfa557ab81 2020-12-18 nightly release (5fcfebd84abc5af3f4816d2a77319dd617d52da1) 2020-12-18 00:03:18 -08:00
006df1bad5 2020-12-17 nightly release (65876d3f5134bba3c6ab5dccd1860ce1b9e43b6d) 2020-12-17 00:03:36 -08:00
902afe6bbb 2020-12-16 nightly release (94e328c0385d2e915d2bc38446d1da2f1cd94068) 2020-12-16 00:05:43 -08:00
450503d815 2020-12-15 nightly release (39a10fb6528b355791993a67b895934e3c902765) 2020-12-15 00:03:40 -08:00
eb6c0edcb4 2020-12-14 nightly release (94a3d4b083c945050a93fefcdd20d7d336185f2d) 2020-12-14 00:03:44 -08:00
b50c85260f 2020-12-13 nightly release (dc4db95540da06623c747bf0f2bf9f4af3d2925a) 2020-12-13 00:02:58 -08:00
637ad9acd8 2020-12-12 nightly release (717f31d9846f9b7707f9b1ab6076e1353f399119) 2020-12-12 00:02:49 -08:00
2275fa0a44 2020-12-11 nightly release (2b1057b0cf2eaea633a90b2e130612d8f93eeb2b) 2020-12-11 00:03:11 -08:00
ff32f65844 2020-12-10 nightly release (27f7d1c2865355c694fb964609df45974748615b) 2020-12-10 00:07:21 -08:00
ffe1744602 2020-12-09 nightly release (2d9585a6a10ea5717c6523086259391684c5aaf2) 2020-12-09 00:03:38 -08:00
6b1df384ea 2020-12-08 nightly release (cb6233aa538114fce55380a79978f3e576eb7cfe) 2020-12-08 00:05:02 -08:00
dff5a4324f 2020-12-07 nightly release (a39398b9e5d528e4a6ca293f1703833932f0d9b2) 2020-12-07 00:02:43 -08:00
c58b37f253 2020-12-06 nightly release (17f53bffefa7bd46e134a99bcd4c4e471b81c86b) 2020-12-06 00:02:41 -08:00
19f40be8c8 2020-12-05 nightly release (6317e0b2f1090ea4189e88557d4ff6656fb758cc) 2020-12-05 00:04:00 -08:00
ea248f6464 2020-12-04 nightly release (2181ff89bb6126b1382f35d7cfca23ee45f40fc9) 2020-12-04 00:03:29 -08:00
2e5c4409a0 2020-12-03 nightly release (79b9c034656652cad838a15094536fe38344ab2c) 2020-12-03 00:03:37 -08:00
c87acc2ecc 2020-12-02 nightly release (bcc85a363ecbcb381623d3c26faa94be13ad58bc) 2020-12-02 00:04:43 -08:00
492c0eb536 2020-12-01 nightly release (ccd20e995fb56a1aafefd90c0f2c68c537534c67) 2020-12-01 00:56:29 -08:00
d3dbcaf423 2020-11-30 nightly release (755b8158e2a664ad308bab3cc0b6f4ae14aefd39) 2020-11-30 00:04:24 -08:00
1cabe03694 2020-11-29 nightly release (36c87f1243f582ae761ec6d69f5965bd91c32d3c) 2020-11-29 00:04:52 -08:00
2221ff5a44 2020-11-28 nightly release (272f4db043ec2c63ecfe6d2759e7893cb842a3c3) 2020-11-28 00:04:01 -08:00
1b0b4998ee 2020-11-27 nightly release (7df84452423f44ebe1db40a2e3463066bf954f95) 2020-11-27 00:02:58 -08:00
c10ca6b85f 2020-11-26 nightly release (18ae12a841bdc99c6cce65ac5c77cc1149dc8564) 2020-11-26 00:05:35 -08:00
b5e7a24057 2020-11-25 nightly release (e9efd8df1bce365d2dddbbc9e9266b7432aee5b2) 2020-11-25 00:03:13 -08:00
168b0224aa 2020-11-24 nightly release (671ee71ad4b6f507218d1cad278a8e743780b716) 2020-11-24 00:03:27 -08:00
88092c79b7 2020-11-23 nightly release (4ed7f36ed181ee784f9904d5eabf073701e1fb78) 2020-11-23 00:02:56 -08:00
90af5978b6 2020-11-22 nightly release (f2da18af143b3fdb322d2188dace6e2020e301b9) 2020-11-22 00:02:42 -08:00
4f320939f0 2020-11-21 nightly release (fdc62c74a60a5cd3a833d50a6f23045e9c55035e) 2020-11-21 00:03:16 -08:00
7ad15e28f6 2020-11-20 nightly release (eb49dabe925b2fa32897dae771f831cb056aef65) 2020-11-20 00:02:57 -08:00
a4262a1db7 2020-11-19 nightly release (8819bad86cd15c74dd076012c59e7b1f6081fda4) 2020-11-19 00:03:09 -08:00
5a09c3b029 2020-11-18 nightly release (df0ae244a999a365ac13695cf4682ea30490b157) 2020-11-18 00:02:46 -08:00
0f0c2ba3e8 2020-11-17 nightly release (49f0e5dfeb64a928c6a2368dd5f86573b07d20fb) 2020-11-17 00:02:39 -08:00
984b5f0945 2020-11-16 nightly release (013e6a3d9d0d3ebe8283bf9af0f3b666ba4d96b7) 2020-11-16 00:03:12 -08:00
c36b7c99a6 2020-11-15 nightly release (8ef7ccd669a36f49eb052d12a71df9b7836cafa5) 2020-11-15 00:04:25 -08:00
b553e35ddf 2020-11-14 nightly release (982ae987d3572b211b371e5c2a7f8062b1ae86f1) 2020-11-14 00:03:21 -08:00
17d86337b4 2020-11-13 nightly release (16d6af74e6605941f51ffd9cacca15e40fa9eec8) 2020-11-13 00:04:31 -08:00
9bc97fcaf0 2020-11-12 nightly release (4b25d83e9bdaee701967d7aff625cedf4c12913c) 2020-11-12 00:03:50 -08:00
20721e12b8 2020-11-11 nightly release (ae5c2febb912066f1a8dec8b54451c09195b2c6d) 2020-11-11 00:02:53 -08:00
0a7dab4ce7 2020-11-10 nightly release (f2eac5df1840b65b24ef2f1c3bd6332f654935b6) 2020-11-10 00:02:38 -08:00
1ca45fae2d 2020-11-09 nightly release (9e0102c10f57631000ebf28e55a055f480b9b780) 2020-11-09 00:02:38 -08:00
e9dd9f342a 2020-11-08 nightly release (781e0ed835e1bfbf6613720c89ba98a4de53e2e9) 2020-11-08 00:02:44 -08:00
6c2cd89110 2020-11-07 nightly release (9d0c6e9469028cd7e7f44172e30aa4ce21b64be3) 2020-11-07 00:03:07 -08:00
1e9fe285da 2020-11-06 nightly release (873652d9accc54f8d2a3fac4baf1c926af755025) 2020-11-06 00:03:02 -08:00
f463bdbe57 2020-11-05 nightly release (d8c3b2b10c14f86f817f1ef12cabc8ef5e37c30a) 2020-11-05 00:03:43 -08:00
6071dc770f 2020-11-04 nightly release (996f444c007a89f7364ed03b7c24755f7ec43eb0) 2020-11-04 00:03:13 -08:00
d60c452534 2020-11-03 nightly release (0ead9d545a92e5d4bed99661b108545a0126826d) 2020-11-03 00:04:54 -08:00
1c10830b51 2020-11-02 nightly release (1cc1da541158b53c24a579fd316d190d5c27bfd1) 2020-11-02 00:02:44 -08:00
b5d9a1c61e 2020-11-01 nightly release (19ede75eb95b2436d5e493497a38c1e6be5ec50b) 2020-11-01 00:02:36 -07:00
ec10b116d2 2020-10-31 nightly release (ee0033af9b7531ba0a453844dc621f8cfa140c20) 2020-10-31 00:04:53 -07:00
3ee3d3b7eb 2020-10-30 nightly release (d95e1afad37ab91a7c8d3fed19d86f2b7f2d218f) 2020-10-30 00:03:00 -07:00
f71e46c37f 2020-10-29 nightly release (b553c06abbe7133c059e05844033067625382973) 2020-10-29 00:02:39 -07:00
d25dcd31ae 2020-10-28 nightly release (ddbdbce6232f896f47b81fc13f95b536cf274d20) 2020-10-28 00:03:01 -07:00
ea2c6ca48e 2020-10-27 nightly release (6b50ccc41cd9feb624243a518e1362659f2fada4) 2020-10-27 00:03:16 -07:00
81d3931f01 2020-10-26 nightly release (d94bd998ece87a1af996ad047e98b1ccba2c3a85) 2020-10-26 00:02:44 -07:00
06e4087ffa 2020-10-25 nightly release (edbc84aa4ae6b9d0410e581c31e8c309426b2d29) 2020-10-25 00:03:10 -07:00
ebbb40c985 2020-10-24 nightly release (13b7855f33e61e3c466197518250e57ffe5886db) 2020-10-24 00:04:54 -07:00
7a69d3fa6b 2020-10-23 nightly release (4fd2cce9fa939006b97abca6e66c117bb42b2b2e) 2020-10-23 00:03:33 -07:00
96741a8516 2020-10-22 nightly release (fe4f90c40bfcb442eddcd26657158bd8fd43a464) 2020-10-22 00:02:41 -07:00
b466478440 2020-10-21 nightly release (8357e2edc329b3a62979ceae3faa1e51fd70150e) 2020-10-21 00:04:07 -07:00
11c8a58712 2020-10-20 nightly release (e5ed037529264e6747a33145aad364660cb250db) 2020-10-20 00:02:41 -07:00
3292b9e050 2020-10-19 nightly release (0c5cd8c2b9cdf473e30bbb1b49ca80ed442813df) 2020-10-19 00:03:03 -07:00
f57dc3adb8 2020-10-18 nightly release (c3466dabaae9328b207804afb043b7b519f64825) 2020-10-18 00:02:39 -07:00
342069f090 2020-10-17 nightly release (cc471c6dafff9b3a45575ba01244b1b58a25ab34) 2020-10-17 00:02:25 -07:00
b1eed8fe32 2020-10-16 nightly release (ec5f81f9d334efa3d46ca9c66885e2ab14b6ce01) 2020-10-16 00:03:20 -07:00
e5ed02e3f0 2020-10-15 nightly release (a87a1c110342dff912963aac1d8095878c79f914) 2020-10-15 00:02:42 -07:00
3142764e51 2020-10-14 nightly release (d790ec6de01a61fe81733c41a64b6092bacfb7bd) 2020-10-14 00:03:00 -07:00
9e25229618 2020-10-13 nightly release (7f6a1b2bd5eca2d25c70afe233f8b6878fec241d) 2020-10-13 00:02:42 -07:00
5015a81dda 2020-10-12 nightly release (a814231616f5db7fcdb8793f64c12d652d1e8572) 2020-10-12 00:02:42 -07:00
73a00339a9 2020-10-11 nightly release (bbb3f0937743ddd006d8a5400e9549623260e6be) 2020-10-11 00:02:34 -07:00
8f6131de72 2020-10-10 nightly release (b7261de0df87c4df548451cbfbe020c62396bd86) 2020-10-10 00:03:06 -07:00
403616e494 2020-10-09 nightly release (87226f72d2827d0367d01b664efa21d80ccaf632) 2020-10-09 00:02:25 -07:00
3ada667f23 2020-10-08 nightly release (c19b9cd18dd1da5f499ef0672e6871928618204d) 2020-10-08 00:07:08 -07:00
69474f119a 2020-10-07 nightly release (205ab4961232140d3a46a6a3f59b377243bb5407) 2020-10-07 00:02:33 -07:00
72429b08dc 2020-10-06 nightly release (2fbe5971b360f63715fcccf4bb61ac7c44dff48c) 2020-10-06 00:04:23 -07:00
8722e9d287 2020-10-05 nightly release (e1ff46b6e5b752a889174ba4a995e74898799287) 2020-10-05 00:02:25 -07:00
8706bf8b1f 2020-10-04 nightly release (2ab74a48395c55d9b186a01bdb09fdd347bf4f1b) 2020-10-04 00:03:20 -07:00
2fc129834c 2020-10-03 nightly release (d8a9c2c27e0d11fdde1d8e5bfbce1b55fa2cd478) 2020-10-03 00:02:21 -07:00
29f546df64 2020-10-02 nightly release (1a2d3b6a750cc4c1709b64412619b28c737e1a79) 2020-10-02 00:02:41 -07:00
17635b8851 2020-10-01 nightly release (4339f5c076e6f59433ac4eec08b6151a803a7604) 2020-10-01 00:02:30 -07:00
1c087f9b30 2020-09-30 nightly release (c2c7099944e94fb7d0460ac8689819b42ba0e30e) 2020-09-30 00:02:23 -07:00
c54443b19e 2020-09-29 nightly release (6bdb871d47f8810fb627d908c4fa2408fa6a632f) 2020-09-29 00:03:39 -07:00
e3234fd375 2020-09-28 nightly release (c3bf402cbbd5bba53fa8b7d86500e4dc3bb904b8) 2020-09-28 00:02:59 -07:00
f3659fb14b 2020-09-27 nightly release (bc5710f2f746d33645858ed82141076ab5fec75d) 2020-09-27 00:02:20 -07:00
4a6dd6ec6a 2020-09-26 nightly release (d9af3d2fcd4e86c1b410f700dc6361e5c538b0af) 2020-09-26 00:02:44 -07:00
d4a3a4ca6e 2020-09-25 nightly release (99e0a87bbb4faa6bb539c0eedf323d79fdd8cfcf) 2020-09-25 00:03:07 -07:00
ce6dd35b28 2020-09-24 nightly release (b3d7c2f97859973c7282a772b811708379064d37) 2020-09-24 00:02:57 -07:00
eeb9914f09 2020-09-23 nightly release (215679573ebff5a03238a7f9aa801a6c00826f19) 2020-09-23 00:04:30 -07:00
af6cc04916 2020-09-22 nightly release (5aed75b21bf81f14f1c527df8832f5a93299019c) 2020-09-22 00:02:30 -07:00
8de4375712 2020-09-21 nightly release (9f67176b821afe4afa75df22f43f4932f1289ef4) 2020-09-21 00:04:36 -07:00
b64de1730d 2020-09-20 nightly release (da7863f46bff365499730244a3198a9d4b201f7c) 2020-09-20 00:02:36 -07:00
420a345397 2020-09-19 nightly release (9e5045e978d8800a6dbeb919745169e4de18927c) 2020-09-19 00:02:26 -07:00
145abbc193 2020-09-18 nightly release (f2b3480795d4364aba32de18e42e1309ffd013f4) 2020-09-18 00:03:00 -07:00
27cdcdc69a 2020-09-17 nightly release (b6f4bb0a703d6c33bedf3c7f4551954a8015fcb9) 2020-09-17 00:02:40 -07:00
b5c2df117b 2020-09-16 nightly release (b85568a54a9c60986235ad1e0cc5dffc71b9d5b1) 2020-09-16 00:03:32 -07:00
4c94add238 2020-09-15 nightly release (f5d231d5933e0ca8feb59f9fe76913d8aa720457) 2020-09-15 10:11:47 -07:00
e5b3332958 2020-09-15 nightly release (2435d941b13723b4ccae1eb65695e2c902b9399d) 2020-09-15 00:02:29 -07:00
ce76eb5dc4 2020-09-14 nightly release (870f6470405a37392eb37ea7e927bca6adfecc3c) 2020-09-14 00:02:42 -07:00
7ad07dab6c 2020-09-13 nightly release (e703c17967ba7565ca8c4a019511b0b7b46a2d5d) 2020-09-13 00:02:22 -07:00
680262a4a5 2020-09-12 nightly release (05c1f1d974910736275b4c4572095a106499c298) 2020-09-12 00:13:43 -07:00
521dce53ee 2020-09-11 nightly release (b5d75dddd93089458c7aee91134ff281d5c3b580) 2020-09-11 00:02:39 -07:00
61fd94ce73 2020-09-10 nightly release (65d4a6b7c05674935da8cbd3ae68f982dd9272ff) 2020-09-10 00:04:37 -07:00
beea9fe377 2020-09-09 nightly release (15cbd1cf4b2cd37e102a495f5e5d8362d2c6e6e2) 2020-09-09 12:26:09 -07:00
bac811d8a8 2020-09-09 nightly release (135ebbde6dda16887cc2c08962bb3694af0f8e82) 2020-09-09 00:02:55 -07:00
5200db437e 2020-09-08 nightly release (cce5982c4ceb77a0797e8bd4c717ebfec0681eab) 2020-09-08 00:02:56 -07:00
2d48355fe2 2020-09-07 nightly release (626e410e1dedcdb9d5a410a8827cc7a8a9fbcce1) 2020-09-07 00:03:07 -07:00
72cbcceccf 2020-09-06 nightly release (70c8daf43946b53af6493d058899ef952d27d339) 2020-09-06 00:02:29 -07:00
a6ebbbad37 2020-09-05 nightly release (5a0d65b06b197c06797a41de04ce99edbe48f669) 2020-09-05 00:02:25 -07:00
b0e834edbe 2020-09-04 nightly release (98ad5ff41f82086ce77d5519945aff288b030c6e) 2020-09-04 00:02:42 -07:00
ccfd128bb8 2020-09-03 nightly release (ab7606702cca34c06f0bf08d6b109402c574563c) 2020-09-03 00:02:37 -07:00
9bef2cdcca 2020-09-02 nightly release (297c938729a0adbd98a235a5970c972f6148adfe) 2020-09-02 13:38:07 -07:00
bae900999d 2020-09-02 nightly release (7000c2efb54bae527de22a325c2b59e9f9af45b9) 2020-09-02 00:02:53 -07:00
cb188a89fa 2020-09-01 nightly release (6da26cf0d96475a2fa4d0aa235f868eff198546e) 2020-09-01 00:04:06 -07:00
c32d9b1874 2020-08-31 nightly release (8fb7c50250f9cb38232bf87008855fd1665b31ec) 2020-08-31 00:03:14 -07:00
a27ceb1a7e 2020-08-30 nightly release (1830e4f08cd3b41df9d643211762156b67371611) 2020-08-30 00:03:02 -07:00
42cb14b81d 2020-08-29 nightly release (000739c31ad34909e64124e0d39b2f49249458e9) 2020-08-29 00:02:39 -07:00
dade70d4af 2020-08-28 nightly release (f06d3904f2c7b6e47ffa532f4a22b9e18941bb90) 2020-08-28 00:03:10 -07:00
0f03b7c404 2020-08-27 nightly release (a070c619b9bed1205ec95b53f97db3fbfc43c4de) 2020-08-27 00:05:38 -07:00
8e23641871 2020-08-26 nightly release (42f6c3b1f475c3c1cb1655a70b128fa5cf236860) 2020-08-26 10:11:03 -07:00
f361befd6c 2020-08-26 nightly release (2b70f827373fe5c90e15ce7ba0a1d331dcd57379) 2020-08-26 00:02:35 -07:00
2b2b063dfd 2020-08-25 nightly release (d1d32003bbe1cb06de552c3581982d1bc8125cd9) 2020-08-25 00:02:45 -07:00
17857c55a3 2020-08-24 nightly release (1f0cfbaaad09921f588adf549751041b8cb2e283) 2020-08-24 00:02:41 -07:00
8d4f134db7 2020-08-23 nightly release (a97ca93c0e698b81599f7a0ca5cdbda947799431) 2020-08-23 00:02:40 -07:00
13f00ef652 2020-08-22 nightly release (88b564ce3927f7a691c29697b2b21873a84a0d23) 2020-08-22 00:06:42 -07:00
f3070b6b37 2020-08-21 nightly release (da036250cd7dae8896fdc14b2bc718bcb3bdd158) 2020-08-21 00:03:56 -07:00
0b355b5f8c 2020-08-20 nightly release (66a79bf114ee4e9a4adf0d83b48f2266cfaeab95) 2020-08-20 10:29:04 -07:00
c75d13a67a 2020-08-20 nightly release (4e964f3b97558792d6af903425561f765b1c5d68) 2020-08-20 00:03:14 -07:00
7915de9032 2020-08-19 nightly release (018b4d7abb894c3de21f9f10b0678c80a7b0a701) 2020-08-19 14:55:57 -07:00
6f80e1207a 2020-08-19 nightly release (fa6b34b54c731938327c8e30e08b287a10b86b0a) 2020-08-19 00:02:57 -07:00
14b1e2392c 2020-08-18 nightly release (c44b1de54e29e9d33365fe0116a43d3958f5a450) 2020-08-18 00:02:32 -07:00
02b98413b7 2020-08-17 nightly release (e2eb0cb1a9e525de9abb352cf39c85cc090e33b8) 2020-08-17 00:02:31 -07:00
865a01d2eb 2020-08-16 nightly release (5bcf9b017a426e55466cf7435009d1fb339684d8) 2020-08-16 00:02:36 -07:00
4458315a89 2020-08-15 nightly release (91f3114fc1350dd2e1797e6edc5c61c44f2ffecd) 2020-08-15 00:03:22 -07:00
ac414f6c7e 2020-08-14 nightly release (b9a105bcc03d27f68b53e32258ee4c252570dcc9) 2020-08-14 00:03:33 -07:00
936fa35cf1 2020-08-13 nightly release (c9dcc833bca69a0f2dd3518ce97136206da8024f) 2020-08-13 00:03:43 -07:00
b7c4a72c53 2020-08-12 nightly release (ecb9e790ed6ceafa738ad52a500b9e50bc0fc241) 2020-08-12 00:02:47 -07:00
c531bf36cd 2020-08-11 nightly release (ddcf3ded3e78042a41fd6d6b861d678e11460821) 2020-08-11 00:02:53 -07:00
1eed3cba2a 2020-08-10 nightly release (d7aaa3327bf9ad8757897a76879230da92bf607f) 2020-08-10 12:37:23 -07:00
c8173d2cb5 2020-08-10 nightly release (55ac2405893165de0687960a6650bbc5c8f29e73) 2020-08-10 00:02:26 -07:00
818b9ac780 2020-08-09 nightly release (b6810c1064eb24cb542a9be56140689dce8ad7a1) 2020-08-09 00:02:28 -07:00
97c7575520 2020-08-08 nightly release (b7a9bc0802b68cff07958937cfda2271155689e5) 2020-08-08 00:02:23 -07:00
5dec2a9909 2020-08-07 nightly release (cb1ac94069700e47f7283c41820d3c2de0af9553) 2020-08-07 00:02:52 -07:00
c81b784a72 2020-08-06 nightly release (644d787cd8d9512b92d18e9a94d6f3a401b5ca69) 2020-08-06 00:03:49 -07:00
5415138490 2020-08-05 nightly release (0f358fab6b92307edba4d89a855cac395f5b1565) 2020-08-05 00:03:04 -07:00
40277cdaae 2020-08-04 nightly release (ae67f4c8b89547d648b66a49048ce13982ed6be7) 2020-08-04 00:03:10 -07:00
51381481e2 2020-08-03 nightly release (bfa94487b968ccb570ef8cd9547029b967e76ed0) 2020-08-03 00:02:59 -07:00
36fa26aae5 2020-08-02 nightly release (91c80d122ab271d36ce37d60acf430fdbd54d249) 2020-08-02 00:03:17 -07:00
69909359f4 2020-08-01 nightly release (44b018ddeb89e9c6283d3a1c17549848cf77d6a4) 2020-08-01 00:03:38 -07:00
006129dc63 2020-07-31 nightly release (2f840b1662b487d5551d7230f8eb4d57645cfff5) 2020-07-31 00:06:51 -07:00
87867a04f9 2020-07-30 nightly release (0444bac940695ccfa43854f9241839f759ba5f28) 2020-07-30 00:03:38 -07:00
7e19a4a05b 2020-07-29 nightly release (6b3f335641e1874c5b0ae0aa719687ed5f42ab45) 2020-07-29 00:03:42 -07:00
e99cef1e0b 2020-07-28 nightly release (4f723825b48e555512813fcffd56e89e5b16eeaf) 2020-07-28 00:03:07 -07:00
1da1361946 2020-07-27 nightly release (96aaa311c0251d24decb9dc5da4957b7c590af6f) 2020-07-27 00:02:27 -07:00
bfbc0a6fcf 2020-07-26 nightly release (b7bda236d18815052378c88081f64935427d7716) 2020-07-26 00:04:40 -07:00
2f4f3b4fb7 2020-07-25 nightly release (366c014a7799f0b7bbc258fd6c271dadb99d1de0) 2020-07-25 00:02:25 -07:00
f83664fb35 2020-07-24 nightly release (c0bfa45f9d26fb814a80f3053fbc550ff77d82c1) 2020-07-24 00:02:36 -07:00
c6ef1bac44 2020-07-23 nightly release (0ec7ba40888f07678d3a1d766817154dc90bef90) 2020-07-23 00:02:44 -07:00
10c53d9390 2020-07-22 nightly release (5c9918e757b019564e74ae6f676cacfe70a87afd) 2020-07-22 00:03:14 -07:00
a41d75b78a 2020-07-21 nightly release (fe415589a964697a78186189bd4815541f853b47) 2020-07-21 00:02:27 -07:00
bb29acf111 2020-07-20 nightly release (bd42e1a082f21d189492a950533970caabf2d089) 2020-07-20 00:03:24 -07:00
33f735f3ec 2020-07-19 nightly release (a69a2628109a55f7f965bb79eca8967fce685a6a) 2020-07-19 00:02:41 -07:00
0fcfa4aca3 2020-07-18 nightly release (e7a09b4d17010e60bc95c25f8165ef479d1b9612) 2020-07-18 00:02:55 -07:00
b18ecab49c 2020-07-17 nightly release (e3e58e20cd57892fd39e4bda33a2c249cab6f739) 2020-07-17 00:03:42 -07:00
7fb344967b 2020-07-16 nightly release (04c0f2e3cc7be79bfa005d1670b19cfb7dee308c) 2020-07-16 00:03:01 -07:00
21dba79af2 2020-07-15 nightly release (9552ec787cf7a004ed7a6063d45bb5e2d85e1d60) 2020-07-15 00:03:03 -07:00
7affad5f48 2020-07-14 nightly release (80d5b3785b88f83eb598e393d0137f045b979c4b) 2020-07-14 00:02:49 -07:00
62b00bcddb 2020-07-13 nightly release (0c77bd7c0bbd4d6e50a5f3ce7b4debbee85d7963) 2020-07-13 12:58:41 -07:00
b7e29fb2c6 2020-07-13 nightly release (67a4f375cdf06113ca959b4e16739edb666f243f) 2020-07-13 00:02:49 -07:00
7bc96af0a7 2020-07-12 nightly release (4b4184fc69e224701529b84d2f182c1d65cea74a) 2020-07-12 00:02:37 -07:00
1ec86870fc 2020-07-11 nightly release (a1ed6e1eb30c03db55881bca536e5edf64622a1b) 2020-07-11 00:02:33 -07:00
5797d683a2 2020-07-10 nightly release (df252c059c3ea4c04f8c39c3b4c25a3f4572b0b7) 2020-07-10 00:03:27 -07:00
2360093da9 2020-07-09 nightly release (bf9cc5c77698da260e806ce07a690770b6d26253) 2020-07-09 00:02:31 -07:00
0a0d5b9a80 2020-07-08 nightly release (e2a291b396be8db6f9ac1fc7d45e97c6026df590) 2020-07-08 00:02:31 -07:00
f4f757998d 2020-07-07 nightly release (22c7d183f7c95a70f75acb310c1abc79d674fb18) 2020-07-07 11:13:40 -07:00
52e3d4aba1 2020-07-07 nightly release (4aa543ed2e3ff9e9e6aab4979efca559a62f22b7) 2020-07-07 00:02:22 -07:00
cbac4f50d3 2020-07-06 nightly release (300a3aaaadb5edaac818713b1752e9af976336f9) 2020-07-06 00:02:26 -07:00
9a199cf853 2020-07-05 nightly release (8ecd4f36aab89df38500e72458af5e05e7f12a78) 2020-07-05 00:02:28 -07:00
d9a61a1671 2020-07-04 nightly release (480851ad2c21e8e39e336d849b8030a2f91718d7) 2020-07-04 00:02:15 -07:00
b2718d2b21 2020-07-03 nightly release (824ab19941b09164b099c0a614b78984b9c68cf9) 2020-07-03 00:04:57 -07:00
b9bde4e47b 2020-07-02 nightly release (ea03f954ada77bfbfef542ce93be2f525c8d8e64) 2020-07-02 00:02:42 -07:00
05bcdcd112 2020-07-01 nightly release (fcadca1bdab62ea5728e49397e023c8863466e83) 2020-07-01 00:02:57 -07:00
6bdf560205 2020-06-30 nightly release (871bfaaba1da0eff28d5a992cc86a2bb745e6621) 2020-06-30 11:14:52 -07:00
ca6a9756c2 2020-06-30 nightly release (b9cca4b186797b4ccdc12197b3b71db94d8e2297) 2020-06-30 00:02:32 -07:00
9a7a41a96d 2020-06-29 nightly release (b35cdc5200af963e410c0a25400fd07f30b89bca) 2020-06-29 00:02:19 -07:00
2c978b42a4 2020-06-28 nightly release (502ec8f7f76619e6cbb1efb0e23eb8c5438daed2) 2020-06-28 00:02:34 -07:00
09c29e5af2 2020-06-27 nightly release (61a8de77cfc50ba0d12348065a7d399663ba6511) 2020-06-27 00:02:23 -07:00
28d1a1436b 2020-06-26 nightly release (eebd492dcfb8d4152a5ba4240486f3df5aaf112d) 2020-06-26 00:02:56 -07:00
64d71c446e 2020-06-25 nightly release (eae1ed99a35eac4e7b6daf7123136e832c50a158) 2020-06-25 00:02:46 -07:00
1d4e5f4a25 2020-06-24 nightly release (7b0f867c4853af69b14bb516ba2a9dbff8f56d51) 2020-06-24 00:02:33 -07:00
8752b4048d 2020-06-23 nightly release (e9efad687875a7de1e599dda776ed1e9509d01eb) 2020-06-23 00:02:44 -07:00
48a48958f3 2020-06-22 nightly release (c8738957221e3edd0aa2fc2be84c037fd0611881) 2020-06-22 12:49:39 -07:00
aa61b8bc13 2020-06-22 nightly release (ab8a99bd36b9969f6226f0dfd62510e3dd6ecb51) 2020-06-22 00:02:27 -07:00
d6e183bda7 2020-06-21 nightly release (3bbedb34b9b316729a27e793d94488b574e1577a) 2020-06-21 00:02:52 -07:00
e51edad5d3 2020-06-20 nightly release (9da277c63557784d6a2ea7c44d1c469d7178d444) 2020-06-20 00:02:52 -07:00
c51aaa7f08 2020-06-19 nightly release (86b1afa039f3fba83f082382b7c62b896fb98e09) 2020-06-19 00:02:39 -07:00
681f9c3458 2020-06-18 nightly release (d1a0e880750bd2a7a88ff3465df3af7f05261790) 2020-06-18 00:02:55 -07:00
309d8f4dea 2020-06-17 nightly release (9d588f7ce26775ffad0035dcc4c1128d62b889fc) 2020-06-17 00:02:54 -07:00
37dddadd6a 2020-06-16 nightly release (bcb44796ba00f9ac5f22e33f1de3a24b277c4c3a) 2020-06-16 00:03:48 -07:00
d630b39c4f 2020-06-15 nightly release (541814f2b7eacabacdc87ccb1b4495bf486f501a) 2020-06-15 00:02:18 -07:00
9fca12ab19 2020-06-14 nightly release (8072f0685f5bd9bc8f1e48ef916518fa31a50826) 2020-06-14 00:02:19 -07:00
c82098341b 2020-06-13 nightly release (8d3fcb43cf5e58d05351107bc99ee5e2ce8ae5b9) 2020-06-13 00:04:02 -07:00
7b4d0fbaa9 2020-06-12 nightly release (c068233300f1473e3119b8d6b8b672c2fb912f1d) 2020-06-12 00:02:03 -07:00
195f29e3ba 2020-06-11 nightly release (c3d4053bc0648f76906208a69d863276c8b82995) 2020-06-11 00:02:27 -07:00
31019eecb8 2020-06-10 nightly release (6bdfd6ae1adb341f5bae2fc8bc987653b8a319ad) 2020-06-10 00:01:56 -07:00
bd13707502 2020-06-09 nightly release (af05158c56af29e062580f458a86a32b8f4c2b85) 2020-06-09 00:02:44 -07:00
41ff48299b 2020-06-08 nightly release (6c56671fd96ac71346c818d0a5ea8087575e9cba) 2020-06-08 00:01:58 -07:00
61a9383920 2020-06-07 nightly release (e4627e5dba1516afe0c12bf748ebc2200b6e378d) 2020-06-07 00:01:59 -07:00
b5da8c9f3e 2020-06-06 nightly release (6a75f650dd3bf6d3fdaafffe7298d141c0377489) 2020-06-06 00:02:17 -07:00
2f447b8e25 2020-06-05 nightly release (8a6914ddb225196d9b4c712ad7a82b464a80c0e5) 2020-06-05 00:02:36 -07:00
00de97d13a 2020-06-04 nightly release (fe684679b06f7f2fe7a7e136ea5605c04254b652) 2020-06-04 12:31:20 -07:00
645b018a22 2020-06-04 nightly release (03eca384fd87ee07c73b6406c3e32ee69bb13fb0) 2020-06-04 00:02:18 -07:00
6fdf7aa7a2 2020-06-03 nightly release (a05ef17e467a152e86e3de1d47868dd4a1d9d648) 2020-06-03 00:02:01 -07:00
588bf42df3 2020-06-02 nightly release (f4365cf5ba8ee4ba319385cbd2ff7b9a825a75d0) 2020-06-02 00:02:14 -07:00
c3dc364b18 2020-06-01 nightly release (caaea084e9e63dd3f22edcab0fda6822f7634820) 2020-06-01 00:02:00 -07:00
860bd46af1 2020-05-31 nightly release (8556664d6896a8e7f48f1c419e06e0568b9ee09e) 2020-05-31 00:02:09 -07:00
825ee6cbb3 2020-05-30 nightly release (b7b99ab0c8f82100177729b9751481852d83e77e) 2020-05-30 00:02:07 -07:00
c1afe3d886 2020-05-29 nightly release (b08a4aaf3bdf59e80590fc2b4a7c821be2213699) 2020-05-29 00:02:06 -07:00
43beca9d58 2020-05-28 nightly release (f5bc91f851f7d3b862643b51f06f0281eb225b8c) 2020-05-28 00:01:59 -07:00
f0711fe9c6 2020-05-27 nightly release (916084d933792f7ee619aee7155fedf68d1a8cd1) 2020-05-27 00:02:22 -07:00
f01bd55177 2020-05-26 nightly release (d035d05080729c30636ff30fcc068de3c7e9badd) 2020-05-26 00:02:00 -07:00
31d8b6eabd 2020-05-25 nightly release (d035d05080729c30636ff30fcc068de3c7e9badd) 2020-05-25 00:02:12 -07:00
74b18f831e 2020-05-24 nightly release (d035d05080729c30636ff30fcc068de3c7e9badd) 2020-05-24 00:03:13 -07:00
7c4f81427c 2020-05-23 nightly release (d035d05080729c30636ff30fcc068de3c7e9badd) 2020-05-23 00:01:53 -07:00
04734358e7 2020-05-22 nightly release (d035d05080729c30636ff30fcc068de3c7e9badd) 2020-05-22 00:02:14 -07:00
991faa295f 2020-05-21 nightly release (e9902358df14dc4809e4f50b12088a5200a1862d) 2020-05-21 00:01:58 -07:00
0a55aad1ea 2020-05-20 nightly release (1465970a343e61f2f2b104859ca7f5d7e03f5d02) 2020-05-20 00:02:15 -07:00
fe43bd1a77 2020-05-19 nightly release (f6f1384811b9cc722f650ed9ead8ee99938c009a) 2020-05-19 00:01:52 -07:00
8216138905 2020-05-18 nightly release (176174a68ba2d36b9a5aaef0943421682ecc66d4) 2020-05-18 00:01:53 -07:00
c680caafb1 2020-05-17 nightly release (176174a68ba2d36b9a5aaef0943421682ecc66d4) 2020-05-17 00:02:01 -07:00
c683033c5b 2020-05-16 nightly release (83df3beacad3e22bdfa95c1d3514af59dceaacdc) 2020-05-16 00:02:02 -07:00
8167c5996d 2020-05-15 nightly release (c0bc1827611f4ad6370e87af35571bfe3cc91e4f) 2020-05-15 00:02:02 -07:00
053c725853 2020-05-14 nightly release (96885f73ed79c4d3d01717248f24694e05e1b041) 2020-05-14 00:01:54 -07:00
5fdf2cdc0e 2020-05-13 nightly release (61bea93fca35ad7ace2c9856499d3f478ffb1377) 2020-05-13 00:02:04 -07:00
f5bcbc5400 2020-05-12 nightly release (cf82011361592c0f55d1ce9e55c692108308374d) 2020-05-12 00:01:56 -07:00
c361c5a57c 2020-05-11 nightly release (324dc1623e2f91892038fb1b151450a7c6529dd9) 2020-05-11 00:02:03 -07:00
5d263b7b81 2020-05-10 nightly release (26928b164f3e43a48bc57fc41ca277583501054d) 2020-05-10 00:01:58 -07:00
e241ab52a0 2020-05-09 nightly release (45010833069c02162fee40291ef7d444dfe7f41b) 2020-05-09 00:02:19 -07:00
2311a48ce4 2020-05-08 nightly release (4c358b8b726f56bc78ce20bb031e10869145de94) 2020-05-08 00:02:45 -07:00
5c7548f1bd 2020-05-07 nightly release (9143d7fb687acb4aff2ca7f8e1fc13c2af7a941b) 2020-05-07 00:01:59 -07:00
b372ce156d 2020-05-06 nightly release (b57b596f20b004116053e821d76ee08d46e55e39) 2020-05-06 00:02:19 -07:00
d84fbdc752 2020-05-05 nightly release (429d90f6487b276b02bd13b26c5291678412b389) 2020-05-05 00:01:54 -07:00
4154b913b5 2020-05-04 nightly release (843c0230f2928aad61a6940688da3a6cd6c4cd57) 2020-05-04 00:01:59 -07:00
bb75b0b420 2020-05-03 nightly release (5baa6b6c34c9a61c936f90168b6449421814553d) 2020-05-03 00:01:57 -07:00
0d1ee45511 2020-05-02 nightly release (d6394183078d3525f64b27ec0659b994a78c42dd) 2020-05-02 00:01:59 -07:00
3c5465887a 2020-05-01 nightly release (9e32a1f5cdcefa3ab55d91121e10d37e8fc850ea) 2020-05-01 00:01:53 -07:00
4684f1fe19 2020-04-30 nightly release (68250fa557cc3079bdd88aee4bee2d9222fbb34d) 2020-04-30 00:02:19 -07:00
4070924186 2020-04-29 nightly release (4bfa51d40595d41d6bec6d4a9b1ade72adde4ebc) 2020-04-29 00:02:11 -07:00
d055855a96 2020-04-28 nightly release (ebcacd5e878f3f8e84c941d6b9567c88f50a2f10) 2020-04-28 00:02:00 -07:00
52f688abbc 2020-04-27 nightly release (f41742ff2fd5c9507c037dc120d75f6f191a87b1) 2020-04-27 00:02:50 -07:00
7bce785c8b 2020-04-26 nightly release (b64fc3c4b5d927928770f9b343eb845123367084) 2020-04-26 00:02:15 -07:00
d3b0b98dbc 2020-04-25 nightly release (6e659e928ba48afa8a6f5d734c37ab187734927b) 2020-04-25 00:02:08 -07:00
1021e473d6 2020-04-24 nightly release (827f04a075d9f564e0871f809ae67f5538b30ab9) 2020-04-24 00:02:02 -07:00
572542260d 2020-04-23 nightly release (b889e0da8ab33e0b47abc817e679851299728be7) 2020-04-23 00:02:18 -07:00
5213e2cf39 2020-04-22 nightly release (799793f27951dbf1abd01c3a1082e6edd525f19e) 2020-04-22 00:02:25 -07:00
23c4dbd92b 2020-04-21 nightly release (a1eb591ea6afa07d2bc6e644bc0697a585dbdd7e) 2020-04-21 00:02:11 -07:00
2c106114c9 2020-04-20 nightly release (0f0d69009eb1dbc58feacc819bc9885990e888aa) 2020-04-20 00:02:31 -07:00
130773838d 2020-04-19 nightly release (b0b9e704ed1f31ed1c81994c20e5a9f3715d2583) 2020-04-19 00:01:52 -07:00
54f8763e8f 2020-04-18 nightly release (197c85fcbc487d707e0e2635a5291da7c8658036) 2020-04-18 00:02:02 -07:00
59c6f7575b 2020-04-17 nightly release (a89d1ed5496dccce310b3dd08a417b5de71e8332) 2020-04-17 00:01:53 -07:00
57f9ef23d8 2020-04-16 nightly release (7539ea0207859d5e20f6e836818bfafa77f3045e) 2020-04-16 00:01:55 -07:00
5ddf7a5733 2020-04-15 nightly release (9cac2b83d9e6838de796e0cecfbacaa6a64dc6a8) 2020-04-15 00:02:09 -07:00
8cfa076820 2020-04-14 nightly release (dd64e738c5cb191a5280b19635fe9941d6c0b95a) 2020-04-14 00:02:13 -07:00
e15f869505 2020-04-13 nightly release (b92f8d9b7e2e8543e2808c1d05c09e57c19ce5d9) 2020-04-13 00:02:03 -07:00
013659bc5b 2020-04-12 nightly release (d83509e603d6d932a818d4e0ca027129aa9c5857) 2020-04-12 00:02:09 -07:00
81f40df4e5 2020-04-11 nightly release (409346eee308017336fc73a4ae1e9b60777b7b49) 2020-04-11 00:02:06 -07:00
cabe3becc0 2020-04-10 nightly release (31dca07fa5673c115ae6e9e68a09b969918c49d8) 2020-04-10 00:01:53 -07:00
4ba93a8a3e 2020-04-09 nightly release (f59e646faa6ec7c388735aeda4a5bd7eb8eb0be2) 2020-04-09 00:01:56 -07:00
1061602446 2020-04-08 nightly release (f0bddd5e7a89a7f8037a27180b27aeb1a90b6f6a) 2020-04-08 00:01:55 -07:00
50aefcfa05 2020-04-07 nightly release (3e5d25fdfd6dc22cfc697a83d924ff386fdf51b3) 2020-04-07 00:02:24 -07:00
598d03f9a9 2020-04-06 nightly release (82d58ed484eb04f894475de0551055f7e070f481) 2020-04-06 00:02:00 -07:00
28295bbcce 2020-04-05 nightly release (b3cdec88e364e56bc69bf42cd6d8583fcfd62a18) 2020-04-05 00:02:30 -07:00
06d935798d 2020-04-04 nightly release (e3e2dd7779974307cc5caa75d539adac690f5ca5) 2020-04-04 00:02:09 -07:00
021d8fa26d 2020-04-03 nightly release (1a146b0577ac7737cc8eb28ecf4c10fd713dd4c5) 2020-04-03 00:01:49 -07:00
d0937d2101 2020-04-02 nightly release (2f50c119545c654088bb2796fa23e747099ca053) 2020-04-02 00:01:54 -07:00
66d3b83708 2020-04-01 nightly release (ee6f7c3e62a179178e8e0cc3ab1aaedfa820d04d) 2020-04-01 00:01:54 -07:00
73f6729ccf 2020-03-31 nightly release (a090de380c1d0be68b26476d69d38a3291268c69) 2020-03-31 00:01:57 -07:00
77f1aef758 2020-03-30 nightly release (340048b67c53f05a04def9e5a69403c7b49915c8) 2020-03-30 00:02:07 -07:00
29ccceee5c 2020-03-29 nightly release (67c382294443cca408347ce68d73d5084301c5a2) 2020-03-29 00:01:52 -07:00
554de41a6e 2020-03-28 nightly release (683246e5ead92503e1022418cab33c93bfbc77e3) 2020-03-28 00:02:02 -07:00
565f305f3a 2020-03-27 nightly release (ac639d927a37f58bda16c7a1b96a2dfcf3a21666) 2020-03-27 00:02:02 -07:00
a49ed6d378 2020-03-26 nightly release (be0cdf5d157a84dd28bd95ca8f5f82306c0340f5) 2020-03-26 00:01:59 -07:00
dd073cd4fc 2020-03-25 nightly release (17abb7c31a30ee8e5037528b7b6552a25b7132e7) 2020-03-25 14:19:03 -07:00
fc211ce302 2020-03-25 nightly release (d6377b7cef0c7bc822df80eedd86689bc51e83d2) 2020-03-25 00:02:04 -07:00
addd3250a4 2020-03-24 nightly release (8b8af0d4580f74680240a88a34fea78be02a6d22) 2020-03-24 00:02:26 -07:00
e8c963122a 2020-03-23 nightly release (a6672f3b305c3805c691f3477e7940d146130a88) 2020-03-23 00:02:27 -07:00
f3908c0ee6 2020-03-22 nightly release (a5b509985a37127fb52fbfdee85c7b336cd8d2c1) 2020-03-22 00:01:50 -07:00
37ea717b03 2020-03-21 nightly release (95ad94c75b09ad2438141e4eb52e83e737966e60) 2020-03-21 00:02:00 -07:00
8b2e30b3f5 2020-03-20 nightly release (ec9f6809734b4cada98d02138ad8de75ce790273) 2020-03-20 00:02:28 -07:00
2730db1362 2020-03-19 nightly release (c8f665dcb626378ca9c51d7b70fc242524105441) 2020-03-19 00:02:22 -07:00
f1a11aee2b 2020-03-18 nightly release (bcbdba450c25cdf7c6bbe590880cf39f0984123c) 2020-03-18 00:01:58 -07:00
5d40408bfd 2020-03-17 nightly release (089a0a211776ac928a9fefb1841e1d44c9b4dee1) 2020-03-17 00:01:57 -07:00
e9bd3eca1b 2020-03-16 nightly release (bdd7dbfd4b75e66a88d393993b41c77f576f74fc) 2020-03-16 00:04:09 -07:00
36718f7877 2020-03-15 nightly release (6c555e1508184f2cbf34c6ec96d557809808fe85) 2020-03-15 00:02:09 -07:00
d3d99e47ef 2020-03-14 nightly release (fb20621b3b80aef95485574f53cceecaf51ac8be) 2020-03-14 00:02:08 -07:00
bac80526bd 2020-03-13 nightly release (fd355965857f5c281d0b60be864745e7b7a223bd) 2020-03-13 00:02:28 -07:00
fc35f28aa5 2020-03-12 nightly release (c235be42ddecc3c4e1cdf192ca2764455082c1c6) 2020-03-12 00:05:52 -07:00
c09e7d1b26 2020-03-11 nightly release (2ce9513b0c8894987f6d42bfb57ff95b22e32c95) 2020-03-11 00:04:29 -07:00
f9ef5dafca 2020-03-10 nightly release (4f62cbe7de8f867809ca601c996dd48a5a28ffca) 2020-03-10 00:08:48 -07:00
d44877c493 2020-03-09 nightly release (7e55494502478d7f78138bc974e681df52b0635c) 2020-03-09 00:02:22 -07:00
81e7c5374a 2020-03-08 nightly release (79d47c1c5ff5306bdd275196b7171c04bebbdcca) 2020-03-08 00:19:17 -08:00
e80485cacc 2020-03-07 nightly release (65bad41cbec096aa767b3752843eddebf845726f) 2020-03-07 00:02:51 -08:00
fbd3eeb1e7 2020-03-06 nightly release (9a5e9d8cecb1c68eebd69618729a7ec5671b2a06) 2020-03-06 00:03:33 -08:00
d499ac979a 2020-03-05 nightly release (2b79bab029ec387618f6b9724a4027b81fbda33c) 2020-03-05 00:04:15 -08:00
b236f19a6a 2020-03-04 nightly release (f097ca503d349fa352281925fc6df72e52fff077) 2020-03-04 00:02:50 -08:00
ea8063a8aa 2020-03-03 nightly release (0afee0c20bc9c3b4261157d5a5f8eee437954a2a) 2020-03-03 00:03:22 -08:00
3b175e7c33 2020-03-02 nightly release (a500491cbc51c093b5990a69c6066479c13e360c) 2020-03-02 00:03:45 -08:00
38aabee374 2020-03-01 nightly release (ace2b4f37f26b8d7782dd6f1ce7e3738f8dc0dec) 2020-03-01 00:02:54 -08:00
bd482deca4 2020-02-29 nightly release (5a8562a6af2f4097975ad079e982a88924c4fd07) 2020-02-29 00:02:32 -08:00
e5f2f9d8c7 2020-02-28 nightly release (f5f1e5e7f66502a6a3f53f22f5034fdef8f040e9) 2020-02-28 00:02:24 -08:00
d2665bc828 2020-02-27 nightly release (973371139475834c44d118ba0256d97b070980a0) 2020-02-27 00:07:20 -08:00
9f327c18e3 2020-02-26 nightly release (758ad516f32708de243f194144ee0f7b9e0f5117) 2020-02-26 00:02:40 -08:00
496939b5fa 2020-02-25 nightly release (4460c8b034f8fd544ff9c271c4aa21698644d352) 2020-02-25 00:02:35 -08:00
bd8e7b182f 2020-02-24 nightly release (039dc9085425e18b8c217f6e33a1c3dd072dd48f) 2020-02-24 00:02:31 -08:00
6693e040e5 2020-02-23 nightly release (6d448acb345d2f21a6ddc6bd4b57a27fc785112d) 2020-02-23 00:02:05 -08:00
9718bd1498 2020-02-22 nightly release (8291e06f8fb14a09e6c3b7a740a182250069242c) 2020-02-22 00:02:08 -08:00
5ec369fd8b 2020-02-21 nightly release (a943b0518bee8f3ad5f6c44a30fbdd3382f2209c) 2020-02-21 00:02:12 -08:00
6ba25af498 2020-02-20 nightly release (e95282ab282032369289232e844ee61fbdf3a4e9) 2020-02-20 00:03:33 -08:00
63db28f43a 2020-02-19 nightly release (d13c1b8af803eecb7586c3760e6e93efb51e6677) 2020-02-19 00:02:30 -08:00
1e1c56b0fd 2020-02-18 nightly release (dde2ff46084202f646632d32aece341c911ff269) 2020-02-18 00:02:29 -08:00
3529bc52ce 2020-02-17 nightly release (87dc2dbcce2f46bbc861ef7e884a0e6b99290cf5) 2020-02-17 00:02:49 -08:00
3b952ed61f 2020-02-16 nightly release (f6808df75fb3ac4bc6b5f17a1d960f069e1faaea) 2020-02-16 00:02:07 -08:00
33e3497594 2020-02-15 nightly release (d35a4c202e917b5d5cb67ac749fcf7931212c25b) 2020-02-15 00:02:15 -08:00
03a562bf9e 2020-02-14 nightly release (ecd3c252b4da3056797f8a505c9ebe8d68db55c4) 2020-02-14 00:02:59 -08:00
b974dc1a0b 2020-02-13 nightly release (b98c7d34ed718774db28651220eb8bb71b2e8baf) 2020-02-13 00:04:26 -08:00
f7b3186fa4 2020-02-12 nightly release (9d9fa2eace07f2833bcd73551b52bf31bbdbc093) 2020-02-12 00:08:14 -08:00
c58acf210e 2020-02-11 nightly release (9857d9b4cd4b50c326c0d8309e92e5bbde083a84) 2020-02-11 00:04:23 -08:00
fa524b1ca1 2020-02-10 nightly release (e2f12885140c36c1d5bf82de6eb47797856fdacd) 2020-02-10 00:03:15 -08:00
121046fe2e 2020-02-09 nightly release (e2f12885140c36c1d5bf82de6eb47797856fdacd) 2020-02-09 00:03:14 -08:00
e9a11f3014 2020-02-08 nightly release (6249d7302b7277864ed0ade93f58d88ee0cd3aa8) 2020-02-08 00:05:28 -08:00
d180c32586 2020-02-07 nightly release (05d18ffaf5ccc5a19245afafe3998fc3731b570d) 2020-02-07 00:02:37 -08:00
77c97edf1e 2020-02-06 nightly release (e76fa9822dc4f980791d1bf01d63b2d4e9993ced) 2020-02-06 00:04:15 -08:00
1c420dd12b 2020-02-05 nightly release (1b446aa2ee540c6fbd029619459962aa7af3f168) 2020-02-05 00:13:49 -08:00
57a3554274 2020-02-04 nightly release (b894dc06de3e0750d9db8bd20b92429f6d873fa1) 2020-02-04 00:08:04 -08:00
e121e114d2 2020-02-03 nightly release (29e6f13cd1c309cf3dfd8ba45a6ca199359422b5) 2020-02-03 00:02:34 -08:00
6bc40330a5 2020-02-02 nightly release (71ad88199abb837286cdec73ccfcfb9f1a0a2356) 2020-02-02 00:02:18 -08:00
35d1dd656d 2020-02-01 nightly release (c83f9849067c5f14ce8e61f84b62af1215af5133) 2020-02-01 00:03:53 -08:00
ed119e9754 2020-01-31 nightly release (ed10408cc64d1acf0e325a3dda01f7b911350052) 2020-01-31 00:02:26 -08:00
7ef2fd057b 2020-01-30 nightly release (821b6aa769645c8190703b7d8e2cc9f36597853a) 2020-01-30 00:08:51 -08:00
9784297745 2020-01-29 nightly release (5e2311033ebc4bb6009f70db68e4d63ba73a1633) 2020-01-29 00:02:54 -08:00
7a245af681 2020-01-28 nightly release (2060e0a9dd197f8116635dc7517dde5191c0b1a0) 2020-01-28 15:25:48 -08:00
ace7cb80fd 2020-01-28 nightly release (8e4161517e112478a1c1f0290fedb91965f95aff) 2020-01-28 00:02:51 -08:00
db7d91084b 2020-01-27 nightly release (1e5aead35b3d2dca993f90e7d3343cc105ab93e1) 2020-01-27 00:02:08 -08:00
989298c613 2020-01-26 nightly release (90a259e1e22d14c79f788520c836d78b7f6325ba) 2020-01-26 00:02:41 -08:00
2e96268be1 2020-01-25 nightly release (e0ffe72649cef3a1dfd321545194d1b3574975c7) 2020-01-25 00:02:48 -08:00
b068dc341c 2020-01-24 nightly release (3ada2e0d64b40622e823b8135d2bbbc74e6526b9) 2020-01-24 00:02:13 -08:00
05641e89d5 2020-01-23 nightly release (21d475e20d8e9ae1652548e33c3be76383a821d3) 2020-01-23 00:07:25 -08:00
9ae9ff0f09 2020-01-22 nightly release (44b270d892176d6526801bdfa5227a8caa9852bf) 2020-01-22 00:03:23 -08:00
8b77987981 2020-01-21 nightly release (ecbf6f99e6a4e373105133b31534c9fb50f2acca) 2020-01-21 00:03:38 -08:00
0ba313bc2f 2020-01-20 nightly release (ecbf6f99e6a4e373105133b31534c9fb50f2acca) 2020-01-20 00:02:06 -08:00
80d85d6d95 2020-01-19 nightly release (58234c0254f135d1e3714b17f44f4922d56d41b4) 2020-01-19 00:02:25 -08:00
72f778b3d8 2020-01-18 nightly release (1ecad2bb2b9cf7c76657250688fb56af2e87886a) 2020-01-18 00:05:21 -08:00
e13d222815 2020-01-17 nightly release (ef5ae4823a00923342c82e08dd06adf579f5bc26) 2020-01-17 00:07:47 -08:00
be1384f850 2020-01-16 nightly release (d75b6b3f9d41c4f82fb2f5a04e8c9b27361e2701) 2020-01-16 00:05:31 -08:00
35bd8fc853 2020-01-15 nightly release (4dce482acb2f0b248e4886b3069dca8e3a1b7681) 2020-01-15 00:04:50 -08:00
90cb5cd111 2020-01-14 nightly release (62b1a5f8466991b3e042345e1c786d520c3b9d91) 2020-01-14 00:02:33 -08:00
aa881c569d 2020-01-13 nightly release (14593f077f9cb248cca85fd18b598d14c47d5d4e) 2020-01-13 00:04:24 -08:00
8ec30d8da4 2020-01-12 nightly release (927c2a02b0b29a0fafcced8d65896dd417023067) 2020-01-12 00:09:43 -08:00
b324108e81 2020-01-11 nightly release (927c2a02b0b29a0fafcced8d65896dd417023067) 2020-01-11 00:02:48 -08:00
0fa8c2c780 2020-01-10 nightly release (8ea49e7a0897b77a2cd6bf854efd80ef593acf02) 2020-01-10 00:02:55 -08:00
004e3ab791 2020-01-09 nightly release (26f552a3d10f9952636346173a1c34e824556c98) 2020-01-09 11:06:55 -08:00
0ae8b50609 2020-01-09 nightly release (0dbd5c0bfe9c4af1ecb0fccc669277d6eafd0d2b) 2020-01-09 00:02:20 -08:00
ab79b1861b 2020-01-08 nightly release (9116f02bebf3a5260feef5732d36c54ecb3b4033) 2020-01-08 00:05:01 -08:00
3ea026f4ab 2020-01-07 nightly release (a561a8448b6b7bb8f6068178fe7bc9e2bf348304) 2020-01-07 00:14:56 -08:00
b5620f68e6 2020-01-06 nightly release (33430cf0946650058118096f78741440676e33c0) 2020-01-06 00:02:22 -08:00
8936e4ed33 2020-01-05 nightly release (3f0b330736a2af1331e49e8e12e880dffd1e5974) 2020-01-05 00:09:33 -08:00
9944deb456 2020-01-04 nightly release (2bac76969c3a7306a818cb35983fee3f3005131a) 2020-01-04 00:02:39 -08:00
9c908cb9c3 2020-01-03 nightly release (95cb66570af68ec93b5a72144a4f8eee45c354dc) 2020-01-03 00:04:06 -08:00
4313ac1d3e 2020-01-02 nightly release (cb1af5f61fb338c591e6427fd274ea5b44df4f26) 2020-01-02 00:03:28 -08:00
505aad770c 2020-01-01 nightly release (cb1af5f61fb338c591e6427fd274ea5b44df4f26) 2020-01-01 00:02:30 -08:00
8c2e381be8 2019-12-31 nightly release (b102550d2c103cd4e2d7cb9535d730417b7c8ef2) 2019-12-31 00:02:47 -08:00
73a5627944 2019-12-30 nightly release (ee87b01f4093208d527fc0bd740723c2fa4f1feb) 2019-12-30 00:07:29 -08:00
86523be4f1 2019-12-29 nightly release (22d84204f7188ed4042505e723eef46f048ff47c) 2019-12-29 00:08:28 -08:00
e67d8245f9 2019-12-28 nightly release (90a187618ef043edb40483746fba16ddb0b9918b) 2019-12-28 00:03:33 -08:00
9fe8cf51de 2019-12-27 nightly release (b522a8e1ff8a531c4ac75a3551b99d5b40125cf0) 2019-12-27 00:02:40 -08:00
fa96a94943 2019-12-26 nightly release (a54dc87e8ebf7634d3fb2f32dd32b73c1a4d095f) 2019-12-26 00:03:14 -08:00
776a8b0c01 2019-12-25 nightly release (a54dc87e8ebf7634d3fb2f32dd32b73c1a4d095f) 2019-12-25 00:08:27 -08:00
9eb235f1a5 2019-12-24 nightly release (363d8be787cb324a451b8511abea4e5bf05f376f) 2019-12-24 00:02:25 -08:00
3d6e73b978 2019-12-23 nightly release (fe76af96eddbf62df0d2722220b907e56e3ee8e6) 2019-12-23 00:02:20 -08:00
6998eb5c60 2019-12-22 nightly release (7d630278daee00ea2db6bc01e8a2a5f160bd8e81) 2019-12-22 00:02:09 -08:00
087fe33bef 2019-12-21 nightly release (700109eb630b79fd65cb93becb7f2d14f93bdb5c) 2019-12-21 00:05:00 -08:00
c899e0f249 2019-12-20 nightly release (4c341582ea18f241628c6e1756633bc9f897b76d) 2019-12-20 00:03:23 -08:00
fc345e9b5c 2019-12-19 nightly release (1e116a5089a353fa5186e436addbc4dd08d73f68) 2019-12-19 00:05:38 -08:00
1fb749785c 2019-12-18 nightly release (3c8892aa0cd80e62f01b1ba3c99e4f5fd713868c) 2019-12-18 00:04:05 -08:00
bb462a28c7 2019-12-17 nightly release (0e548a76ebc42d604d2c26ed586b899e2a25ded4) 2019-12-17 00:02:15 -08:00
28efaa7b99 2019-12-16 nightly release (409151e1bb90c66546901d372f59930f72242ee9) 2019-12-16 00:02:14 -08:00
96449cd8e6 2019-12-15 nightly release (ec92711aac5537bb509535c74bf59ccbf4d72db2) 2019-12-15 00:02:04 -08:00
74de2ce96f 2019-12-14 nightly release (36d17f4105f48d264fe085ce3dc0e4e825ebfb42) 2019-12-14 00:02:26 -08:00
4954096451 2019-12-13 nightly release (f7c92f60ba25ec5c50e125a4098913684448deca) 2019-12-13 00:02:15 -08:00
35cdcd2d31 2019-12-12 nightly release (2488231fe32e6e2c86f58acaa34766a6daa19235) 2019-12-12 00:02:20 -08:00
827fe3ff0a 2019-12-11 nightly release (8013ffd400b80ea3478e4b38daf9049647bf1550) 2019-12-11 00:03:19 -08:00
e8dc63722a 2019-12-10 nightly release (5205556782a71bdb8d90bcf0dcc58262713056b6) 2019-12-10 00:06:41 -08:00
6f08a756d1 2019-12-09 nightly release (190dac13e33267b4808a280fbc449ad672ec7544) 2019-12-09 00:04:51 -08:00
cefedb5547 2019-12-08 nightly release (6848f9abb82edc4e03e34b2c45a6c559a45a6c7c) 2019-12-08 00:02:27 -08:00
e2195755f4 2019-12-07 nightly release (63f1b780ba584384aef515ee3405f5847234787a) 2019-12-07 00:06:39 -08:00
7f6fc267c8 2019-12-06 nightly release (d32aec5ad627290516c17f37deeae04cafa06eab) 2019-12-06 00:05:51 -08:00
3fd4c696d9 2019-12-05 nightly release (a939b52ddbb0e1f9b7bf6b3a90dd4f7b331ef8de) 2019-12-05 00:02:45 -08:00
2561aea9fa 2019-12-04 nightly release (7e472679ff95fe4d23bcad89e98b8a6a2594880a) 2019-12-04 00:02:39 -08:00
d68cce5399 2019-12-03 nightly release (18ec4632b335521c8497ca2760a386c6d0d10a64) 2019-12-03 00:02:14 -08:00
66186d66e7 2019-12-02 nightly release (c780610f2d8358297cb4e4460692d496e124d64d) 2019-12-02 00:02:02 -08:00
bb9fdbeb5b 2019-12-01 nightly release (dd52f50fc85e6710f020936c1fc5f14673508350) 2019-12-01 00:03:15 -08:00
60d1c2048a 2019-11-30 nightly release (dd52f50fc85e6710f020936c1fc5f14673508350) 2019-11-30 00:02:51 -08:00
1e33e3ff3c 2019-11-29 nightly release (dd52f50fc85e6710f020936c1fc5f14673508350) 2019-11-29 00:02:09 -08:00
801675b97a 2019-11-28 nightly release (1350b99de4f13dfecfac335c088a79ff1f7e6adc) 2019-11-28 00:03:47 -08:00
3b178ac7ba 2019-11-27 nightly release (829499e626362e6a1badeba4e28c0f7a7a120fde) 2019-11-27 00:04:56 -08:00
2b44caa054 2019-11-26 nightly release (b8f50d9cc860460c7e9b6d3b370cab546e9f9583) 2019-11-26 00:02:13 -08:00
f8779b3945 2019-11-25 nightly release (3990e9d1ca23e39bfa65fe1d907ddb9f9dbf0919) 2019-11-25 00:02:52 -08:00
dfcf37b25a 2019-11-24 nightly release (3990e9d1ca23e39bfa65fe1d907ddb9f9dbf0919) 2019-11-24 00:02:20 -08:00
c1c03d34de 2019-11-23 nightly release (328ec5460f159ad9d42ddec463eaff2c118b092e) 2019-11-23 00:03:11 -08:00
1ac88375e4 2019-11-22 nightly release (48b943960e98082f103b9cbbdac54ec1eeca3fc9) 2019-11-22 00:02:32 -08:00
91110f4b2a 2019-11-21 nightly release (f7b12a985812f278b225fd58549ac95c7864a22c) 2019-11-21 00:06:35 -08:00
85ab8fc4b7 2019-11-20 nightly release (7495c25440709213cf9e018e7929a1a691c34489) 2019-11-20 00:03:35 -08:00
13f5548df1 2019-11-19 nightly release (a9ad2e2f00e9b7d9b30af5c0c6db348432ad7964) 2019-11-19 00:02:54 -08:00
95eee138ed 2019-11-18 nightly release (bb217eee985529b87f9582d6e4d6cb452fcd3909) 2019-11-18 00:03:42 -08:00
b6b29af92a 2019-11-17 nightly release (b011461c9f4bee46075cdf23a13d75c35baed612) 2019-11-17 00:02:06 -08:00
1b1e2c69e2 2019-11-16 nightly release (455b5c1a7d1503c03853e37b5a2caf60948359eb) 2019-11-16 00:04:39 -08:00
a3eaa3fac6 2019-11-15 nightly release (d22f61432dc71315029641b01bf264482bc9c5ca) 2019-11-15 15:49:07 -08:00
dc8c0d2ad7 2019-11-15 nightly release (d22f61432dc71315029641b01bf264482bc9c5ca) 2019-11-15 14:03:45 -08:00
031bd7a195 2019-11-15 nightly release (e1a309a647c5096bddaf44c423cc5dea8d6cae6c) 2019-11-15 00:03:24 -08:00
96a8c5838a 2019-11-14 nightly release (9fd7db616a0e220a74f4459958ea2d3186d5e55a) 2019-11-14 00:01:54 -08:00
fcfd231cbb 2019-11-13 nightly release 2019-11-13 00:02:28 -08:00
692a412019 2019-11-12 nightly release 2019-11-12 00:02:10 -08:00
c302ad7241 2019-11-11 nightly release 2019-11-11 03:00:50 -08:00
4700557819 2019-11-10 nightly release 2019-11-10 03:00:46 -08:00
36ddaddedc 2019-11-09 nightly release 2019-11-09 03:03:17 -08:00
3d0a871767 2019-11-08 nightly release (f362ae1f72525138b0b5e6240fc480f72148712f) 2019-11-08 13:10:28 -08:00
945c9128dc 2019-11-08 nightly release 2019-11-08 03:01:53 -08:00
76e078d3b0 2019-11-07 nightly release 2019-11-07 03:01:36 -08:00
40453f8c2d 2019-11-06 nightly release 2019-11-06 03:02:19 -08:00
2169a327ea 2019-11-05 nightly release 2019-11-05 03:02:39 -08:00
8bd6991397 2019-11-04 nightly release 2019-11-04 03:01:55 -08:00
cd324a9107 2019-11-03 nightly release 2019-11-03 03:04:23 -08:00
51770b859f 2019-11-02 nightly release 2019-11-02 03:01:32 -07:00
c4a27d2dad 2019-11-01 nightly release 2019-11-01 03:01:17 -07:00
4e8f1b5ee3 2019-10-31 nightly release 2019-10-31 03:03:17 -07:00
9328ceaba3 2019-10-30 nightly release 2019-10-30 03:01:42 -07:00
7fd0514bec 2019-10-29 nightly release 2019-10-29 03:04:52 -07:00
97ce0ad2e2 2019-10-28 nightly release 2019-10-28 03:05:15 -07:00
a9ffa7dbfe 2019-10-27 nightly release 2019-10-27 03:01:31 -07:00
30ae96a55e 2019-10-26 nightly release 2019-10-26 03:03:27 -07:00
8ee799199f 2019-10-25 nightly release 2019-10-25 03:01:22 -07:00
18edd71061 2019-10-24 nightly release 2019-10-24 03:02:07 -07:00
2b630217db 2019-10-23 nightly release 2019-10-23 03:02:12 -07:00
4b1da15d9b 2019-10-22 nightly release 2019-10-22 03:07:48 -07:00
201ea71b2e 2019-10-21 nightly release 2019-10-21 03:04:27 -07:00
6ea77adf16 2019-10-20 nightly release 2019-10-20 03:01:18 -07:00
6e0fb1c894 2019-10-19 nightly release 2019-10-19 03:07:41 -07:00
e0b14d085e 2019-10-18 nightly release 2019-10-18 03:02:23 -07:00
fb77773d5f 2019-10-17 nightly release 2019-10-17 03:04:03 -07:00
5c86840692 2019-10-16 nightly release 2019-10-16 03:01:01 -07:00
21ad376b4e 2019-10-15 nightly release 2019-10-15 03:02:07 -07:00
47dab98ccb nightly release 2019-10-14 03:00:32 -07:00
1398 changed files with 36027 additions and 15638 deletions

View File

@ -113,6 +113,7 @@ case "$tag" in
UCX_COMMIT=${_UCX_COMMIT}
UCC_COMMIT=${_UCC_COMMIT}
TRITON=yes
INSTALL_MINGW=yes
;;
pytorch-linux-jammy-cuda13.0-cudnn9-py3-gcc11)
CUDA_VERSION=13.0.0
@ -361,6 +362,7 @@ docker build \
--build-arg "OPENBLAS=${OPENBLAS:-}" \
--build-arg "SKIP_SCCACHE_INSTALL=${SKIP_SCCACHE_INSTALL:-}" \
--build-arg "SKIP_LLVM_SRC_BUILD_INSTALL=${SKIP_LLVM_SRC_BUILD_INSTALL:-}" \
--build-arg "INSTALL_MINGW=${INSTALL_MINGW:-}" \
-f $(dirname ${DOCKERFILE})/Dockerfile \
-t "$tmp_tag" \
"$@" \

View File

@ -83,10 +83,6 @@ function build_cpython {
py_suffix=${py_ver::-1}
py_folder=$py_suffix
fi
# Update to rc2 due to https://github.com/python/cpython/commit/c72699086fe4
if [ "$py_suffix" == "3.14.0" ]; then
py_suffix="3.14.0rc2"
fi
wget -q $PYTHON_DOWNLOAD_URL/$py_folder/Python-$py_suffix.tgz -O Python-$py_ver.tgz
do_cpython_build $py_ver Python-$py_suffix

View File

@ -0,0 +1,10 @@
#!/bin/bash
set -ex
# Install MinGW-w64 for Windows cross-compilation
apt-get update
apt-get install -y g++-mingw-w64-x86-64-posix
echo "MinGW-w64 installed successfully"
x86_64-w64-mingw32-g++ --version

View File

@ -19,8 +19,8 @@ pip_install \
transformers==4.36.2
pip_install coloredlogs packaging
pip_install onnxruntime==1.23.0
pip_install onnxscript==0.5.3
pip_install onnxruntime==1.23.1
pip_install onnxscript==0.5.4
# Cache the transformers model to be used later by ONNX tests. We need to run the transformers
# package to download the model. By default, the model is cached at ~/.cache/huggingface/hub/

View File

@ -39,9 +39,13 @@ case ${DOCKER_TAG_PREFIX} in
DOCKER_GPU_BUILD_ARG=""
;;
rocm*)
# we want the patch version of 7.0 instead
if [[ "$GPU_ARCH_VERSION" == *"7.0"* ]]; then
GPU_ARCH_VERSION="${GPU_ARCH_VERSION}.2"
fi
# we want the patch version of 6.4 instead
if [[ "$GPU_ARCH_VERSION" == *"6.4"* ]]; then
GPU_ARCH_VERSION="${GPU_ARCH_VERSION}.2"
GPU_ARCH_VERSION="${GPU_ARCH_VERSION}.4"
fi
BASE_TARGET=rocm
GPU_IMAGE=rocm/dev-ubuntu-22.04:${GPU_ARCH_VERSION}-complete

View File

@ -75,9 +75,13 @@ case ${image} in
DOCKERFILE_SUFFIX="_cuda_aarch64"
;;
manylinux2_28-builder:rocm*)
# we want the patch version of 7.0 instead
if [[ "$GPU_ARCH_VERSION" == *"7.0"* ]]; then
GPU_ARCH_VERSION="${GPU_ARCH_VERSION}.2"
fi
# we want the patch version of 6.4 instead
if [[ "$GPU_ARCH_VERSION" == *"6.4"* ]]; then
GPU_ARCH_VERSION="${GPU_ARCH_VERSION}.2"
GPU_ARCH_VERSION="${GPU_ARCH_VERSION}.4"
fi
TARGET=rocm_final
MANY_LINUX_VERSION="2_28"

View File

@ -334,12 +334,12 @@ sympy==1.13.3
#Pinned versions:
#test that import:
onnx==1.18.0
onnx==1.19.1
#Description: Required by onnx tests, and mypy and test_public_bindings.py when checking torch.onnx._internal
#Pinned versions:
#test that import:
onnxscript==0.5.3
onnxscript==0.5.4
#Description: Required by mypy and test_public_bindings.py when checking torch.onnx._internal
#Pinned versions:
#test that import:

View File

@ -103,6 +103,11 @@ COPY ci_commit_pins/torchbench.txt torchbench.txt
RUN if [ -n "${INDUCTOR_BENCHMARKS}" ]; then bash ./install_inductor_benchmark_deps.sh; fi
RUN rm install_inductor_benchmark_deps.sh common_utils.sh timm.txt huggingface-requirements.txt torchbench.txt
ARG INSTALL_MINGW
COPY ./common/install_mingw.sh install_mingw.sh
RUN if [ -n "${INSTALL_MINGW}" ]; then bash ./install_mingw.sh; fi
RUN rm install_mingw.sh
ARG TRITON
ARG TRITON_CPU

View File

@ -57,8 +57,8 @@ def clone_external_repo(target: str, repo: str, dst: str = "", update_submodules
logger.info("Successfully cloned %s", target)
return r, commit
except GitCommandError as e:
logger.error("Git operation failed: %s", e)
except GitCommandError:
logger.exception("Git operation failed")
raise
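For context on the hunk above: `logger.exception()` records the same message at ERROR level but also attaches the active traceback, which is why the new code no longer needs to bind the exception as `e`. A minimal, self-contained sketch (illustration only, not code from this PR):

```python
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("example")

def risky() -> None:
    raise ValueError("boom")

try:
    risky()
except ValueError as e:
    # Old pattern: only the message is logged; the traceback is lost
    # unless it is formatted by hand.
    logger.error("Git operation failed: %s", e)

try:
    risky()
except ValueError:
    # New pattern: logger.exception() logs at ERROR level and appends the
    # traceback automatically, so binding the exception is unnecessary.
    # (The hunk above also re-raises so callers still see the failure.)
    logger.exception("Git operation failed")
```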

View File

@ -6,7 +6,7 @@ dependencies = [
"GitPython==3.1.45",
"docker==7.1.0",
"pytest==7.3.2",
"uv==0.8.6"
"uv==0.9.5"
]
[tool.setuptools]

View File

@ -187,19 +187,22 @@ if [[ $CUDA_VERSION == 12* || $CUDA_VERSION == 13* ]]; then
export USE_CUFILE=0
else
DEPS_LIST+=(
"/usr/local/cuda/lib64/libnvToolsExt.so.1"
"/usr/local/cuda/lib64/libcublas.so.12"
"/usr/local/cuda/lib64/libcublasLt.so.12"
"/usr/local/cuda/lib64/libcudart.so.12"
"/usr/local/cuda/lib64/libnvrtc.so.12"
"/usr/local/cuda/extras/CUPTI/lib64/libcupti.so.12")
DEPS_SONAME+=(
"libnvToolsExt.so.1"
"libcublas.so.12"
"libcublasLt.so.12"
"libcudart.so.12"
"libnvrtc.so.12"
"libcupti.so.12")
if [[ $CUDA_VERSION != 12.9* ]]; then
DEPS_LIST+=("/usr/local/cuda/lib64/libnvToolsExt.so.1")
DEPS_SONAME+=("libnvToolsExt.so.1")
fi
fi
else
echo "Using nvidia libs from pypi."

View File

@ -485,6 +485,22 @@ test_inductor_aoti() {
/usr/bin/env "${TEST_ENVS[@]}" python test/run_test.py --cpp --verbose -i cpp/test_aoti_abi_check cpp/test_aoti_inference cpp/test_vec_half_AVX2 -dist=loadfile
}
test_inductor_aoti_cross_compile_for_windows() {
TEST_REPORTS_DIR=$(pwd)/test/test-reports
mkdir -p "$TEST_REPORTS_DIR"
# Set WINDOWS_CUDA_HOME environment variable
WINDOWS_CUDA_HOME="$(pwd)/win-torch-wheel-extracted"
export WINDOWS_CUDA_HOME
echo "WINDOWS_CUDA_HOME is set to: $WINDOWS_CUDA_HOME"
echo "Contents:"
ls -lah "$(pwd)/win-torch-wheel-extracted/lib/x64/" || true
python test/inductor/test_aoti_cross_compile_windows.py -k compile --package-dir "$TEST_REPORTS_DIR" --win-torch-lib-dir "$(pwd)/win-torch-wheel-extracted/torch/lib"
}
test_inductor_cpp_wrapper_shard() {
if [[ -z "$NUM_TEST_SHARDS" ]]; then
echo "NUM_TEST_SHARDS must be defined to run a Python test shard"
@ -900,7 +916,7 @@ test_inductor_set_cpu_affinity(){
export LD_PRELOAD="$JEMALLOC_LIB":"$LD_PRELOAD"
export MALLOC_CONF="oversize_threshold:1,background_thread:true,metadata_thp:auto,dirty_decay_ms:-1,muzzy_decay_ms:-1"
if [[ "${TEST_CONFIG}" != *aarch64* ]]; then
if [[ "$(uname -m)" != "aarch64" ]]; then
# Use Intel OpenMP for x86
IOMP_LIB="$(dirname "$(which python)")/../lib/libiomp5.so"
export LD_PRELOAD="$IOMP_LIB":"$LD_PRELOAD"
@ -914,7 +930,7 @@ test_inductor_set_cpu_affinity(){
cores=$((cpus / thread_per_core))
# Set number of cores to 16 on aarch64 for performance runs
if [[ "${TEST_CONFIG}" == *aarch64* && $cores -gt 16 ]]; then
if [[ "$(uname -m)" == "aarch64" && $cores -gt 16 ]]; then
cores=16
fi
export OMP_NUM_THREADS=$cores
@ -1615,6 +1631,7 @@ test_operator_benchmark() {
TEST_REPORTS_DIR=$(pwd)/test/test-reports
mkdir -p "$TEST_REPORTS_DIR"
TEST_DIR=$(pwd)
ARCH=$(uname -m)
test_inductor_set_cpu_affinity
@ -1629,7 +1646,7 @@ test_operator_benchmark() {
pip_install pandas
python check_perf_csv.py \
--actual "${TEST_REPORTS_DIR}/operator_benchmark_eager_float32_cpu.csv" \
--expected "expected_ci_operator_benchmark_eager_float32_cpu.csv"
--expected "${ARCH}_expected_ci_operator_benchmark_eager_float32_cpu.csv"
}
test_operator_microbenchmark() {
@ -1666,7 +1683,7 @@ if [[ "${TEST_CONFIG}" == *numpy_2* ]]; then
python -m pip install --pre numpy==2.0.2 scipy==1.13.1 numba==0.60.0
fi
python test/run_test.py --include dynamo/test_functions.py dynamo/test_unspec.py test_binary_ufuncs.py test_fake_tensor.py test_linalg.py test_numpy_interop.py test_tensor_creation_ops.py test_torch.py torch_np/test_basic.py
elif [[ "${BUILD_ENVIRONMENT}" == *aarch64* && "${TEST_CONFIG}" != *perf_cpu_aarch64* ]]; then
elif [[ "${BUILD_ENVIRONMENT}" == *aarch64* && "${TEST_CONFIG}" == 'default' ]]; then
test_linux_aarch64
elif [[ "${TEST_CONFIG}" == *backward* ]]; then
test_forward_backward_compatibility
@ -1717,6 +1734,8 @@ elif [[ "${TEST_CONFIG}" == *inductor-triton-cpu* ]]; then
test_inductor_triton_cpu
elif [[ "${TEST_CONFIG}" == *inductor-micro-benchmark* ]]; then
test_inductor_micro_benchmark
elif [[ "${TEST_CONFIG}" == *aoti_cross_compile_for_windows* ]]; then
test_inductor_aoti_cross_compile_for_windows
elif [[ "${TEST_CONFIG}" == *huggingface* ]]; then
install_torchvision
id=$((SHARD_NUMBER-1))

View File

@ -163,8 +163,13 @@ if [[ "$(uname)" != Darwin ]]; then
MEMORY_LIMIT_MAX_JOBS=12
NUM_CPUS=$(( $(nproc) - 2 ))
# Defaults here for **binary** linux builds so they can be changed in one place
export MAX_JOBS=${MAX_JOBS:-$(( ${NUM_CPUS} > ${MEMORY_LIMIT_MAX_JOBS} ? ${MEMORY_LIMIT_MAX_JOBS} : ${NUM_CPUS} ))}
if [[ "$(uname)" == Linux ]]; then
# Defaults here for **binary** linux builds so they can be changed in one place
export MAX_JOBS=${MAX_JOBS:-$(( ${NUM_CPUS} > ${MEMORY_LIMIT_MAX_JOBS} ? ${MEMORY_LIMIT_MAX_JOBS} : ${NUM_CPUS} ))}
else
# For other builds
export MAX_JOBS=${NUM_CPUS}
fi
cat >>"$envfile" <<EOL
export MAX_JOBS="${MAX_JOBS}"
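A hypothetical Python sketch of the job-count logic this hunk introduces (illustration only; the names mirror the shell variables): on Linux, the arithmetic `NUM_CPUS > MEMORY_LIMIT_MAX_JOBS ? MEMORY_LIMIT_MAX_JOBS : NUM_CPUS` simply caps binary builds at 12 jobs, while other non-Darwin platforms keep all available cores minus two.

```python
import os
import platform

MEMORY_LIMIT_MAX_JOBS = 12                    # cap for memory-bound binary builds
num_cpus = max((os.cpu_count() or 1) - 2, 1)  # leave two cores for the system

if platform.system() == "Linux":
    # Binary Linux builds: min(nproc - 2, 12), same as the shell ternary above.
    max_jobs = min(num_cpus, MEMORY_LIMIT_MAX_JOBS)
else:
    # Other (non-Darwin) builds keep the uncapped value.
    max_jobs = num_cpus

print(f"MAX_JOBS={max_jobs}")
```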

View File

@ -0,0 +1,354 @@
# PyTorch Docstring Writing Guide
This skill describes how to write docstrings for functions and methods in the PyTorch project, following the conventions in `torch/_tensor_docs.py` and `torch/nn/functional.py`.
## General Principles
- Use **raw strings** (`r"""..."""`) for all docstrings to avoid issues with LaTeX/math backslashes
- Follow **Sphinx/reStructuredText** (reST) format for documentation
- Be **concise but complete** - include all essential information
- Always include **examples** when possible
- Use **cross-references** to related functions/classes
## Docstring Structure
### 1. Function Signature (First Line)
Start with the function signature showing all parameters:
```python
r"""function_name(param1, param2, *, kwarg1=default1, kwarg2=default2) -> ReturnType
```
**Notes:**
- Include the function name
- Show positional and keyword-only arguments (use `*` separator)
- Include default values
- Show return type annotation
- This line should NOT end with a period
### 2. Brief Description
Provide a one-line description of what the function does:
```python
r"""conv2d(input, weight, bias=None, stride=1, padding=0, dilation=1, groups=1) -> Tensor
Applies a 2D convolution over an input image composed of several input
planes.
```
### 3. Mathematical Formulas (if applicable)
Use Sphinx math directives for mathematical expressions:
```python
.. math::
\text{Softmax}(x_{i}) = \frac{\exp(x_i)}{\sum_j \exp(x_j)}
```
Or inline math: `:math:\`x^2\``
### 4. Cross-References
Link to related classes and functions using Sphinx roles:
- `:class:\`~torch.nn.ModuleName\`` - Link to a class
- `:func:\`torch.function_name\`` - Link to a function
- `:meth:\`~Tensor.method_name\`` - Link to a method
- `:attr:\`attribute_name\`` - Reference an attribute
- The `~` prefix shows only the last component (e.g., `Conv2d` instead of `torch.nn.Conv2d`)
**Example:**
```python
See :class:`~torch.nn.Conv2d` for details and output shape.
```
### 5. Notes and Warnings
Use admonitions for important information:
```python
.. note::
This function doesn't work directly with NLLLoss,
which expects the Log to be computed between the Softmax and itself.
Use log_softmax instead (it's faster and has better numerical properties).
.. warning::
:func:`new_tensor` always copies :attr:`data`. If you have a Tensor
``data`` and want to avoid a copy, use :func:`torch.Tensor.requires_grad_`
or :func:`torch.Tensor.detach`.
```
### 6. Args Section
Document all parameters with type annotations and descriptions:
```python
Args:
input (Tensor): input tensor of shape :math:`(\text{minibatch} , \text{in\_channels} , iH , iW)`
weight (Tensor): filters of shape :math:`(\text{out\_channels} , kH , kW)`
bias (Tensor, optional): optional bias tensor of shape :math:`(\text{out\_channels})`. Default: ``None``
stride (int or tuple): the stride of the convolving kernel. Can be a single number or a
tuple `(sH, sW)`. Default: 1
```
**Formatting rules:**
- Parameter name in **lowercase**
- Type in parentheses: `(Type)`, `(Type, optional)` for optional parameters
- Description follows the type
- For optional parameters, include "Default: ``value``" at the end
- Use double backticks for inline code: ``` ``None`` ```
- Indent continuation lines by 2 spaces
### 7. Keyword Args Section (if applicable)
Sometimes keyword arguments are documented separately:
```python
Keyword args:
dtype (:class:`torch.dtype`, optional): the desired type of returned tensor.
Default: if None, same :class:`torch.dtype` as this tensor.
device (:class:`torch.device`, optional): the desired device of returned tensor.
Default: if None, same :class:`torch.device` as this tensor.
requires_grad (bool, optional): If autograd should record operations on the
returned tensor. Default: ``False``.
```
### 8. Returns Section (if needed)
Document the return value:
```python
Returns:
Tensor: Sampled tensor of same shape as `logits` from the Gumbel-Softmax distribution.
If ``hard=True``, the returned samples will be one-hot, otherwise they will
be probability distributions that sum to 1 across `dim`.
```
Or simply include it in the function signature line if obvious from context.
### 9. Examples Section
Always include examples when possible:
```python
Examples::
>>> inputs = torch.randn(33, 16, 30)
>>> filters = torch.randn(20, 16, 5)
>>> F.conv1d(inputs, filters)
>>> # With square kernels and equal stride
>>> filters = torch.randn(8, 4, 3, 3)
>>> inputs = torch.randn(1, 4, 5, 5)
>>> F.conv2d(inputs, filters, padding=1)
```
**Formatting rules:**
- Use `Examples::` with double colon
- Use `>>>` prompt for Python code
- Include comments with `#` when helpful
- Show actual output when it helps understanding (indent without `>>>`)
### 10. External References
Link to papers or external documentation:
```python
.. _Link Name:
https://arxiv.org/abs/1611.00712
```
Reference them in text: ```See `Link Name`_```
## Method Types
### Native Python Functions
For regular Python functions, use a standard docstring:
```python
def relu(input: Tensor, inplace: bool = False) -> Tensor:
r"""relu(input, inplace=False) -> Tensor
Applies the rectified linear unit function element-wise. See
:class:`~torch.nn.ReLU` for more details.
"""
# implementation
```
### C-Bound Functions (using add_docstr)
For C-bound functions, use `_add_docstr`:
```python
conv1d = _add_docstr(
torch.conv1d,
r"""
conv1d(input, weight, bias=None, stride=1, padding=0, dilation=1, groups=1) -> Tensor
Applies a 1D convolution over an input signal composed of several input
planes.
See :class:`~torch.nn.Conv1d` for details and output shape.
Args:
input: input tensor of shape :math:`(\text{minibatch} , \text{in\_channels} , iW)`
weight: filters of shape :math:`(\text{out\_channels} , kW)`
...
""",
)
```
### In-Place Variants
For in-place operations (ending with `_`), reference the original:
```python
add_docstr_all(
"abs_",
r"""
abs_() -> Tensor
In-place version of :meth:`~Tensor.abs`
""",
)
```
### Alias Functions
For aliases, simply reference the original:
```python
add_docstr_all(
"absolute",
r"""
absolute() -> Tensor
Alias for :func:`abs`
""",
)
```
## Common Patterns
### Shape Documentation
Use LaTeX math notation for tensor shapes:
```python
:math:`(\text{minibatch} , \text{in\_channels} , iH , iW)`
```
### Reusable Argument Definitions
For commonly used arguments, define them once and reuse:
```python
common_args = parse_kwargs(
"""
dtype (:class:`torch.dtype`, optional): the desired type of returned tensor.
Default: if None, same as this tensor.
"""
)
# Then use with .format():
r"""
...
Keyword args:
{dtype}
{device}
""".format(**common_args)
```
### Template Insertion
Insert reproducibility notes or other common text:
```python
r"""
{tf32_note}
{cudnn_reproducibility_note}
""".format(**reproducibility_notes, **tf32_notes)
```
## Complete Example
Here's a complete example showing all elements:
```python
def gumbel_softmax(
logits: Tensor,
tau: float = 1,
hard: bool = False,
eps: float = 1e-10,
dim: int = -1,
) -> Tensor:
r"""
Sample from the Gumbel-Softmax distribution and optionally discretize.
Args:
logits (Tensor): `[..., num_features]` unnormalized log probabilities
tau (float): non-negative scalar temperature
hard (bool): if ``True``, the returned samples will be discretized as one-hot vectors,
but will be differentiated as if it is the soft sample in autograd. Default: ``False``
dim (int): A dimension along which softmax will be computed. Default: -1
Returns:
Tensor: Sampled tensor of same shape as `logits` from the Gumbel-Softmax distribution.
If ``hard=True``, the returned samples will be one-hot, otherwise they will
be probability distributions that sum to 1 across `dim`.
.. note::
This function is here for legacy reasons, may be removed from nn.Functional in the future.
Examples::
>>> logits = torch.randn(20, 32)
>>> # Sample soft categorical using reparametrization trick:
>>> F.gumbel_softmax(logits, tau=1, hard=False)
>>> # Sample hard categorical using "Straight-through" trick:
>>> F.gumbel_softmax(logits, tau=1, hard=True)
.. _Link 1:
https://arxiv.org/abs/1611.00712
"""
# implementation
```
## Quick Checklist
When writing a PyTorch docstring, ensure:
- [ ] Use raw string (`r"""`)
- [ ] Include function signature on first line
- [ ] Provide brief description
- [ ] Document all parameters in Args section with types
- [ ] Include default values for optional parameters
- [ ] Use Sphinx cross-references (`:func:`, `:class:`, `:meth:`)
- [ ] Add mathematical formulas if applicable
- [ ] Include at least one example in Examples section
- [ ] Add warnings/notes for important caveats
- [ ] Link to related module class with `:class:`
- [ ] Use proper math notation for tensor shapes
- [ ] Follow consistent formatting and indentation
## Common Sphinx Roles Reference
- `:class:\`~torch.nn.Module\`` - Class reference
- `:func:\`torch.function\`` - Function reference
- `:meth:\`~Tensor.method\`` - Method reference
- `:attr:\`attribute\`` - Attribute reference
- `:math:\`equation\`` - Inline math
- `:ref:\`label\`` - Internal reference
- ``` ``code`` ``` - Inline code (use double backticks)
## Additional Notes
- **Indentation**: Use 4 spaces for code, 2 spaces for continuation of parameter descriptions
- **Line length**: Try to keep lines under 100 characters when possible
- **Periods**: End sentences with periods, but not the signature line
- **Backticks**: Use double backticks for code: ``` ``True`` ``None`` ``False`` ```
- **Types**: Common types are `Tensor`, `int`, `float`, `bool`, `str`, `tuple`, `list`, etc.

View File

@ -7,16 +7,12 @@ max-line-length = 120
# C408 ignored because we like the dict keyword argument syntax
# E501 is not flexible enough, we're using B950 instead
ignore =
E203,E305,E402,E501,E704,E721,E741,F405,F841,F999,W503,W504,C408,E302,W291,E303,F824,
E203,E305,E402,E501,E704,E741,F405,F841,F999,W503,W504,C408,E302,W291,E303,F824,
# shebang has extra meaning in fbcode lints, so I think it's not worth trying
# to line this up with executable bit
EXE001,
# these ignores are from flake8-bugbear; please fix!
B007,B008,B017,B019,B023,B028,B903,B905,B906,B907,B908,B910
# these ignores are from flake8-comprehensions; please fix!
C407,
# these ignores are from flake8-logging-format; please fix!
G100,G101,G200
# these ignores are from flake8-simplify. please fix or ignore with commented reason
SIM105,SIM108,SIM110,SIM111,SIM113,SIM114,SIM115,SIM116,SIM117,SIM118,SIM119,SIM12,
# SIM104 is already covered by pyupgrade ruff

View File

@ -8,6 +8,7 @@ assignees: ''
---
> NOTE: Remember to label this issue with "`ci: sev`"
> If you want autorevert to be disabled, keep the ci: disable-autorevert label
<!-- Add the `merge blocking` label to this PR to prevent PRs from being merged while this issue is open -->

View File

@ -1,7 +1,7 @@
---
name: DISABLE AUTOREVERT
name: "D❌\U0001F519 ISABLE AUTOREVERT"
about: Disables autorevert when open
title: "❌​\U0001F519 [DISABLE AUTOREVERT]"
title: "[DISABLE AUTOREVERT]"
labels: 'ci: disable-autorevert'
assignees: ''

View File

@ -65,7 +65,7 @@ runs:
cd .ci/lumen_cli
python3 -m pip install -e .
)
MAX_JOBS="$(nproc --ignore=6)"
MAX_JOBS="$(nproc --ignore=10)"
export MAX_JOBS
# Split the comma-separated list and build each target

View File

@ -124,3 +124,10 @@ runs:
id: login-ecr
continue-on-error: true
uses: aws-actions/amazon-ecr-login@062b18b96a7aff071d4dc91bc00c4c1a7945b076 # v2.0.1
- name: Preserve github env variables for use in docker
shell: bash
run: |
env | grep '^GITHUB' >> "${RUNNER_TEMP}/github_env_${GITHUB_RUN_ID}"
env | grep '^CI' >> "${RUNNER_TEMP}/github_env_${GITHUB_RUN_ID}"
env | grep '^RUNNER' >> "${RUNNER_TEMP}/github_env_${GITHUB_RUN_ID}"

View File

@ -1 +1 @@
8ad2aa5d354d1bf432339113860185d5a5d1abbd
69bbe7363897764f9e758d851cd0340147d27f94

View File

@ -1 +1 @@
f5c6c2ec6490455e86f67b2a25c10390d60a27f7
1752fe6809b74921644866275ab80244b96e80bc

View File

@ -283,6 +283,9 @@ RUN --mount=type=bind,source=${TORCH_WHEELS_PATH},target=/dist \
uv pip install --system $(cat torch_build_versions.txt | xargs) --index-url https://download.pytorch.org/whl/nightly/cu$(echo $CUDA_VERSION | cut -d. -f1,2 | tr -d '.'); \
fi
RUN --mount=type=cache,target=/root/.cache/uv \
uv pip install --system --pre apache-tvm-ffi==0.1.0b15
# Install the vllm wheel from previous stage
RUN --mount=type=cache,target=/root/.cache/uv \
uv pip install --system /wheels/vllm/*.whl --verbose
@ -295,6 +298,8 @@ RUN --mount=type=cache,target=/root/.cache/uv \
ARG torch_cuda_arch_list='8.0;8.9;9.0a;10.0a;12.0'
ENV TORCH_CUDA_ARCH_LIST=${torch_cuda_arch_list}
# TODO(elainewy): remove this once vllm commit is updated, and install flashinfer from pip
# see https://github.com/pytorch/pytorch/pull/165274#issuecomment-3408531784
ARG FLASHINFER_GIT_REPO="https://github.com/flashinfer-ai/flashinfer.git"
ARG FLASHINFER_GIT_REF="v0.2.14.post1"

View File

@ -15,6 +15,11 @@
- "module: reinplacing"
then:
- "module: pt2-dispatcher"
- any:
- "vllm-compile"
then:
- "module: vllm"
- "oncall: pt2"
- any:
- "module: vmap"
then:
@ -27,10 +32,6 @@
- "module: pt2 optimizer"
then:
- "module: dynamo"
- any:
- "module: flex attention"
then:
- "module: higher order operators"
- any:
- "module: aotinductor"
then:

.github/labeler.yml vendored (29 changed lines)
View File

@ -133,3 +133,32 @@
"ciflow/vllm":
- .github/ci_commit_pins/vllm.txt
"ciflow/b200":
- test/test_matmul_cuda.py
- test/test_scaled_matmul_cuda.py
- test/inductor/test_fp8.py
- aten/src/ATen/native/cuda/Blas.cpp
- torch/**/*cublas*
- torch/_inductor/kernel/mm.py
- test/inductor/test_max_autotune.py
- third_party/fbgemm
"ciflow/h100":
- test/test_matmul_cuda.py
- test/test_scaled_matmul_cuda.py
- test/inductor/test_fp8.py
- aten/src/ATen/native/cuda/Blas.cpp
- torch/**/*cublas*
- torch/_inductor/kernel/mm.py
- test/inductor/test_max_autotune.py
- third_party/fbgemm
"ciflow/rocm":
- test/test_matmul_cuda.py
- test/test_scaled_matmul_cuda.py
- test/inductor/test_fp8.py
- aten/src/ATen/native/cuda/Blas.cpp
- torch/_inductor/kernel/mm.py
- test/inductor/test_max_autotune.py
- third_party/fbgemm

View File

@ -3,6 +3,7 @@ ciflow_tracking_issue: 64124
ciflow_push_tags:
- ciflow/b200
- ciflow/b200-symm-mem
- ciflow/b200-distributed
- ciflow/binaries
- ciflow/binaries_libtorch
- ciflow/binaries_wheel
@ -15,7 +16,8 @@ ciflow_push_tags:
- ciflow/inductor-micro-benchmark
- ciflow/inductor-micro-benchmark-cpu-x86
- ciflow/inductor-perf-compare
- ciflow/inductor-perf-test-nightly-rocm
- ciflow/inductor-perf-test-nightly-rocm-mi300
- ciflow/inductor-perf-test-nightly-rocm-mi355
- ciflow/inductor-perf-test-nightly-x86-zen
- ciflow/inductor-periodic
- ciflow/inductor-rocm
@ -31,6 +33,7 @@ ciflow_push_tags:
- ciflow/rocm
- ciflow/rocm-mi300
- ciflow/rocm-mi355
- ciflow/rocm-navi31
- ciflow/s390
- ciflow/slow
- ciflow/torchbench

View File

@ -79,21 +79,21 @@ PYTORCH_EXTRA_INSTALL_REQUIREMENTS = {
"nvidia-cufile-cu12==1.13.1.3; platform_system == 'Linux'"
),
"12.9": (
"nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | "
"nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | "
"nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | "
"nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' and platform_machine == 'x86_64' | "
"nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | "
"nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | "
"nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' and platform_machine == 'x86_64' | "
"nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' and platform_machine == 'x86_64' | "
"nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' and platform_machine == 'x86_64' | "
"nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' and platform_machine == 'x86_64' | "
"nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' and platform_machine == 'x86_64' | "
"nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' and platform_machine == 'x86_64' | "
"nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | "
"nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | "
"nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux' and platform_machine == 'x86_64'"
"nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' | "
"nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' | "
"nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' | "
"nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' | "
"nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' | "
"nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' | "
"nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' | "
"nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' | "
"nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' | "
"nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' | "
"nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' | "
"nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' | "
"nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' | "
"nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' | "
"nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux'"
),
"13.0": (
"nvidia-cuda-nvrtc==13.0.48; platform_system == 'Linux' | "
@ -241,7 +241,11 @@ def generate_libtorch_matrix(
arches += CUDA_ARCHES
arches += ROCM_ARCHES
elif os == "windows":
arches += CUDA_ARCHES
# TODO (huydhn): Only build CUDA 12.9 for Linux. This logic is to be cleaned up
# in 2.10
windows_cuda_arches = CUDA_ARCHES.copy()
windows_cuda_arches.remove("12.9")
arches += windows_cuda_arches
if libtorch_variants is None:
libtorch_variants = [
"shared-with-deps",
@ -305,7 +309,11 @@ def generate_wheels_matrix(
if os == "linux":
arches += CUDA_ARCHES + ROCM_ARCHES + XPU_ARCHES
elif os == "windows":
arches += CUDA_ARCHES + XPU_ARCHES
# TODO (huydhn): Only build CUDA 12.9 for Linux. This logic is to be cleaned up
# in 2.10
windows_cuda_arches = CUDA_ARCHES.copy()
windows_cuda_arches.remove("12.9")
arches += windows_cuda_arches + XPU_ARCHES
elif os == "linux-aarch64":
# Separate new if as the CPU type is different and
# uses different build/test scripts
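The marker edit at the top of this hunk drops `platform_machine == 'x86_64'` from the CUDA 12.9 extra requirements, presumably so the nvidia-* wheels are also pulled in on aarch64 Linux; pip evaluates these environment markers on the installing machine. A small sketch using the third-party `packaging` library (illustration only, not code from this PR):

```python
from packaging.markers import Marker

old = Marker("platform_system == 'Linux' and platform_machine == 'x86_64'")
new = Marker("platform_system == 'Linux'")

aarch64 = {"platform_system": "Linux", "platform_machine": "aarch64"}
x86_64 = {"platform_system": "Linux", "platform_machine": "x86_64"}

print(old.evaluate(aarch64))  # False: dependency skipped on aarch64
print(new.evaluate(aarch64))  # True:  dependency now installed on aarch64
print(old.evaluate(x86_64))   # True
print(new.evaluate(x86_64))   # True
```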

View File

@ -1092,7 +1092,7 @@ class GitHubPR:
editor = node["editor"]
return GitHubComment(
body_text=node["bodyText"],
created_at=node["createdAt"] if "createdAt" in node else "",
created_at=node.get("createdAt", ""),
author_login=node["author"]["login"],
author_url=node["author"].get("url", None),
author_association=node["authorAssociation"],
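The replacement above is a pure refactor: `dict.get()` with a default returns the same value as the explicit membership test. A trivial illustration (hypothetical node, not real GraphQL data):

```python
node = {"bodyText": "LGTM"}  # hypothetical node with no "createdAt" key

created_at_old = node["createdAt"] if "createdAt" in node else ""
created_at_new = node.get("createdAt", "")

assert created_at_old == created_at_new == ""
```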

View File

@ -26,9 +26,8 @@ name: !{{ build_environment }}
- name: Setup Python
uses: actions/setup-python@v6
with:
# TODO: Removeme once 3.14 is out
# .4 version is min minor for 3.10, and also no-gil version of 3.13 needs at least 3.13.3
python-version: "!{{ (py_ver.strip('t') + '.4') if '3.14' not in py_ver else '3.14.0-rc.2' }}"
python-version: "!{{ py_ver.strip('t') + ('.4' if '3.14' not in py_ver else '.0') }}"
freethreaded: !{{ "true" if py_ver.endswith('t') else "false" }}
{%- endmacro %}

View File

@ -79,9 +79,9 @@ jobs:
runs-on: "windows-11-arm64-preview"
{%- else %}
{%- if branches == "nightly" %}
runs-on: "${{ needs.get-label-type.outputs.label-type }}windows.4xlarge"
runs-on: "${{ needs.get-label-type.outputs.label-type }}windows.12xlarge"
{%- else %}
runs-on: "${{ needs.get-label-type.outputs.label-type }}windows.4xlarge.nonephemeral"
runs-on: "${{ needs.get-label-type.outputs.label-type }}windows.12xlarge.nonephemeral"
{%- endif %}
{%- endif %}
timeout-minutes: !{{ common.timeout_minutes_windows_binary }}

View File

@ -37,7 +37,7 @@ on:
runner:
required: false
type: string
default: "linux.2xlarge"
default: "linux.c7i.2xlarge"
description: |
Label of the runner this job should run on.
test-matrix:

View File

@ -224,6 +224,46 @@ jobs:
continue-on-error: true
uses: ./.github/actions/download-td-artifacts
- name: Download Windows torch wheel for cross-compilation
if: matrix.win_torch_wheel_artifact != ''
uses: seemethere/download-artifact-s3@1da556a7aa0a088e3153970611f6c432d58e80e6 # v4.2.0
with:
name: ${{ matrix.win_torch_wheel_artifact }}
path: win-torch-wheel
- name: Extract Windows wheel and setup CUDA libraries
if: matrix.win_torch_wheel_artifact != ''
shell: bash
run: |
set -x
# Find the wheel file
WHEEL_FILE=$(find win-torch-wheel -name "*.whl" -type f | head -n 1)
if [ -z "$WHEEL_FILE" ]; then
echo "Error: No wheel file found in win-torch-wheel directory"
exit 1
fi
echo "Found wheel file: $WHEEL_FILE"
# Unzip the wheel file
unzip -q "$WHEEL_FILE" -d win-torch-wheel-extracted
echo "Extracted wheel contents"
# Setup CUDA libraries (cuda.lib and cudart.lib) directory
mkdir -p win-torch-wheel-extracted/lib/x64
if [ -f "win-torch-wheel/cuda.lib" ]; then
mv win-torch-wheel/cuda.lib win-torch-wheel-extracted/lib/x64/
echo "Moved cuda.lib to win-torch-wheel-extracted/lib/x64/"
fi
if [ -f "win-torch-wheel/cudart.lib" ]; then
mv win-torch-wheel/cudart.lib win-torch-wheel-extracted/lib/x64/
echo "Moved cudart.lib to win-torch-wheel-extracted/lib/x64/"
fi
# Verify CUDA libraries are present
echo "CUDA libraries:"
ls -la win-torch-wheel-extracted/lib/x64/ || echo "No CUDA libraries found"
- name: Parse ref
id: parse-ref
run: .github/scripts/parse_ref.py

View File

@ -168,6 +168,31 @@ jobs:
run: |
.ci/pytorch/win-build.sh
# Collect Windows torch libs and CUDA libs for cross-compilation
- name: Collect Windows CUDA libs for cross-compilation
if: steps.build.outcome != 'skipped' && inputs.cuda-version != 'cpu'
shell: bash
run: |
set -ex
# Create directory structure if does not exist
mkdir -p /c/${{ github.run_id }}/build-results
# Copy CUDA libs
CUDA_PATH="/c/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v${{ inputs.cuda-version }}"
if [ -f "${CUDA_PATH}/lib/x64/cuda.lib" ]; then
cp "${CUDA_PATH}/lib/x64/cuda.lib" /c/${{ github.run_id }}/build-results/
fi
if [ -f "${CUDA_PATH}/lib/x64/cudart.lib" ]; then
cp "${CUDA_PATH}/lib/x64/cudart.lib" /c/${{ github.run_id }}/build-results/
fi
# List collected files
echo "Collected CUDA libs:"
ls -lah /c/${{ github.run_id }}/build-results/*.lib
# Upload to github so that people can click and download artifacts
- name: Upload artifacts to s3
if: steps.build.outcome != 'skipped'

.github/workflows/b200-distributed.yml vendored Normal file (62 changed lines)
View File

@ -0,0 +1,62 @@
name: CI for distributed tests on B200
on:
pull_request:
paths:
- .github/workflows/b200-distributed.yml
workflow_dispatch:
push:
tags:
- ciflow/b200-distributed/*
schedule:
- cron: 46 8 * * * # about 1:46am PDT
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}-${{ github.event_name == 'workflow_dispatch' }}-${{ github.event_name == 'schedule' }}
cancel-in-progress: true
permissions:
id-token: write
contents: read
jobs:
get-label-type:
if: github.repository_owner == 'pytorch'
name: get-label-type
uses: pytorch/pytorch/.github/workflows/_runner-determinator.yml@main
with:
triggering_actor: ${{ github.triggering_actor }}
issue_owner: ${{ github.event.pull_request.user.login || github.event.issue.user.login }}
curr_branch: ${{ github.head_ref || github.ref_name }}
curr_ref_type: ${{ github.ref_type }}
linux-jammy-cuda12_8-py3_10-gcc11-build-distributed-b200:
name: linux-jammy-cuda12.8-py3.10-gcc11-build-distributed-b200
uses: ./.github/workflows/_linux-build.yml
needs: get-label-type
with:
runner_prefix: "${{ needs.get-label-type.outputs.label-type }}"
runner: linux.12xlarge.memory
build-environment: linux-jammy-cuda12.8-py3.10-gcc11-distributed-b200
docker-image-name: ci-image:pytorch-linux-jammy-cuda12.8-cudnn9-py3-gcc11
cuda-arch-list: '10.0'
test-matrix: |
{ include: [
{ config: "distributed", shard: 1, num_shards: 2, runner: "linux.dgx.b200.8" },
{ config: "distributed", shard: 2, num_shards: 2, runner: "linux.dgx.b200.8" },
]}
secrets: inherit
linux-jammy-cuda12_8-py3_10-gcc11-test-distributed-b200:
name: linux-jammy-cuda12.8-py3.10-gcc11-test-b200
uses: ./.github/workflows/_linux-test.yml
needs:
- linux-jammy-cuda12_8-py3_10-gcc11-build-distributed-b200
with:
timeout-minutes: 1200
build-environment: linux-jammy-cuda12.8-py3.10-gcc11-distributed-b200
docker-image: ${{ needs.linux-jammy-cuda12_8-py3_10-gcc11-build-distributed-b200.outputs.docker-image }}
test-matrix: ${{ needs.linux-jammy-cuda12_8-py3_10-gcc11-build-distributed-b200.outputs.test-matrix }}
aws-role-to-assume: arn:aws:iam::308535385114:role/gha_workflow_s3_and_ecr_read_only
secrets: inherit

View File

@ -27,9 +27,8 @@ jobs:
fail-fast: false
matrix:
python-version: [ '3.12' ]
# TODO (huydhn): Add cu130 after https://github.com/vllm-project/vllm/issues/24464 is resolved
platform: [ 'manylinux_2_28_x86_64', 'manylinux_2_28_aarch64' ]
device: [ 'cu128', 'cu129' ]
device: [ 'cu128', 'cu129', 'cu130' ]
include:
- platform: manylinux_2_28_x86_64
device: cu128
@ -39,6 +38,10 @@ jobs:
device: cu129
manylinux-image: 'pytorch/manylinux2_28-builder:cuda12.9'
runner: linux.12xlarge.memory
- platform: manylinux_2_28_x86_64
device: cu130
manylinux-image: 'pytorch/manylinux2_28-builder:cuda13.0'
runner: linux.12xlarge.memory
- platform: manylinux_2_28_aarch64
device: cu128
manylinux-image: 'pytorch/manylinuxaarch64-builder:cuda12.8'
@ -47,6 +50,11 @@ jobs:
device: cu129
manylinux-image: 'pytorch/manylinuxaarch64-builder:cuda12.9'
runner: linux.arm64.r7g.12xlarge.memory
exclude:
# TODO (huydhn): Add cu130 aarch64 once PyTorch is on 2.9+ and
# xformers is update to support 13.0
- platform: manylinux_2_28_aarch64
device: cu130
name: "Build ${{ matrix.device }} vLLM wheel on ${{ matrix.platform }}"
runs-on: ${{ matrix.runner }}
timeout-minutes: 480
@ -169,7 +177,12 @@ jobs:
fail-fast: false
matrix:
platform: [ 'manylinux_2_28_x86_64', 'manylinux_2_28_aarch64' ]
device: [ 'cu128', 'cu129' ]
device: [ 'cu128', 'cu129', 'cu130' ]
exclude:
# TODO (huydhn): Add cu130 aarch64 once PyTorch is on 2.9+ and
# xformers is update to support 13.0
- platform: manylinux_2_28_aarch64
device: cu130
env:
PLATFORM: ${{ matrix.platform }}
BUILD_DEVICE: ${{ matrix.device }}

View File

@ -224,7 +224,7 @@ jobs:
ALPINE_IMAGE: "arm64v8/alpine"
build_name: manywheel-py3_10-cuda-aarch64-12_9
build_environment: linux-aarch64-binary-manywheel
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux' and platform_machine == 'x86_64'
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux'
timeout-minutes: 420
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
@ -473,7 +473,7 @@ jobs:
ALPINE_IMAGE: "arm64v8/alpine"
build_name: manywheel-py3_11-cuda-aarch64-12_9
build_environment: linux-aarch64-binary-manywheel
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux' and platform_machine == 'x86_64'
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux'
timeout-minutes: 420
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
@ -722,7 +722,7 @@ jobs:
ALPINE_IMAGE: "arm64v8/alpine"
build_name: manywheel-py3_12-cuda-aarch64-12_9
build_environment: linux-aarch64-binary-manywheel
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux' and platform_machine == 'x86_64'
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux'
timeout-minutes: 420
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
@ -971,7 +971,7 @@ jobs:
ALPINE_IMAGE: "arm64v8/alpine"
build_name: manywheel-py3_13-cuda-aarch64-12_9
build_environment: linux-aarch64-binary-manywheel
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux' and platform_machine == 'x86_64'
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux'
timeout-minutes: 420
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
@ -1220,7 +1220,7 @@ jobs:
ALPINE_IMAGE: "arm64v8/alpine"
build_name: manywheel-py3_13t-cuda-aarch64-12_9
build_environment: linux-aarch64-binary-manywheel
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux' and platform_machine == 'x86_64'
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux'
timeout-minutes: 420
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
@ -1469,7 +1469,7 @@ jobs:
ALPINE_IMAGE: "arm64v8/alpine"
build_name: manywheel-py3_14-cuda-aarch64-12_9
build_environment: linux-aarch64-binary-manywheel
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux' and platform_machine == 'x86_64'
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux'
timeout-minutes: 420
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
@ -1718,7 +1718,7 @@ jobs:
ALPINE_IMAGE: "arm64v8/alpine"
build_name: manywheel-py3_14t-cuda-aarch64-12_9
build_environment: linux-aarch64-binary-manywheel
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux' and platform_machine == 'x86_64'
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux'
timeout-minutes: 420
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
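
The hunks above drop the "and platform_machine == 'x86_64'" clause from every requirement marker in the aarch64 wheel builds; with the old marker, none of the nvidia-* packages would ever be selected on an arm64 host. A minimal sketch of how such a PEP 508 marker evaluates, using the third-party packaging library; the environment dict below is a hypothetical arm64 host, not something read from these workflows:

    from packaging.markers import Marker

    aarch64_env = {"platform_system": "Linux", "platform_machine": "aarch64"}  # assumed arm64 host

    old = Marker("platform_system == 'Linux' and platform_machine == 'x86_64'")
    new = Marker("platform_system == 'Linux'")

    print(old.evaluate(environment=aarch64_env))  # False: dependency would be skipped on aarch64
    print(new.evaluate(environment=aarch64_env))  # True: dependency is installed on aarch64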

View File

@ -259,7 +259,7 @@ jobs:
runner_prefix: "${{ needs.get-label-type.outputs.label-type }}"
build_name: manywheel-py3_10-cuda12_9
build_environment: linux-binary-manywheel
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux' and platform_machine == 'x86_64'
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux'
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
manywheel-py3_10-cuda12_9-test: # Testing
@ -925,7 +925,7 @@ jobs:
runner_prefix: "${{ needs.get-label-type.outputs.label-type }}"
build_name: manywheel-py3_11-cuda12_9
build_environment: linux-binary-manywheel
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux' and platform_machine == 'x86_64'
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux'
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
manywheel-py3_11-cuda12_9-test: # Testing
@ -1591,7 +1591,7 @@ jobs:
runner_prefix: "${{ needs.get-label-type.outputs.label-type }}"
build_name: manywheel-py3_12-cuda12_9
build_environment: linux-binary-manywheel
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux' and platform_machine == 'x86_64'
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux'
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
manywheel-py3_12-cuda12_9-test: # Testing
@ -2257,7 +2257,7 @@ jobs:
runner_prefix: "${{ needs.get-label-type.outputs.label-type }}"
build_name: manywheel-py3_13-cuda12_9
build_environment: linux-binary-manywheel
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux' and platform_machine == 'x86_64'
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux'
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
manywheel-py3_13-cuda12_9-test: # Testing
@ -2923,7 +2923,7 @@ jobs:
runner_prefix: "${{ needs.get-label-type.outputs.label-type }}"
build_name: manywheel-py3_13t-cuda12_9
build_environment: linux-binary-manywheel
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux' and platform_machine == 'x86_64'
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux'
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
manywheel-py3_13t-cuda12_9-test: # Testing
@ -3589,7 +3589,7 @@ jobs:
runner_prefix: "${{ needs.get-label-type.outputs.label-type }}"
build_name: manywheel-py3_14-cuda12_9
build_environment: linux-binary-manywheel
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux' and platform_machine == 'x86_64'
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux'
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
manywheel-py3_14-cuda12_9-test: # Testing
@ -4255,7 +4255,7 @@ jobs:
runner_prefix: "${{ needs.get-label-type.outputs.label-type }}"
build_name: manywheel-py3_14t-cuda12_9
build_environment: linux-binary-manywheel
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux' and platform_machine == 'x86_64'
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.9.86; platform_system == 'Linux' | nvidia-cuda-runtime-cu12==12.9.79; platform_system == 'Linux' | nvidia-cuda-cupti-cu12==12.9.79; platform_system == 'Linux' | nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' | nvidia-cublas-cu12==12.9.1.4; platform_system == 'Linux' | nvidia-cufft-cu12==11.4.1.4; platform_system == 'Linux' | nvidia-curand-cu12==10.3.10.19; platform_system == 'Linux' | nvidia-cusolver-cu12==11.7.5.82; platform_system == 'Linux' | nvidia-cusparse-cu12==12.5.10.65; platform_system == 'Linux' | nvidia-cusparselt-cu12==0.7.1; platform_system == 'Linux' | nvidia-nccl-cu12==2.27.5; platform_system == 'Linux' | nvidia-nvshmem-cu12==3.3.20; platform_system == 'Linux' | nvidia-nvtx-cu12==12.9.79; platform_system == 'Linux' | nvidia-nvjitlink-cu12==12.9.86; platform_system == 'Linux' | nvidia-cufile-cu12==1.14.1.1; platform_system == 'Linux'
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
manywheel-py3_14t-cuda12_9-test: # Testing
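
The same marker change is applied across the x86_64 manywheel matrix above. As the values shown here suggest, PYTORCH_EXTRA_INSTALL_REQUIREMENTS is one string holding several PEP 508 requirements separated by " | "; a small sketch of parsing such a value with the packaging library, using a truncated stand-in string rather than the full list:

    from packaging.requirements import Requirement

    # truncated stand-in for a real PYTORCH_EXTRA_INSTALL_REQUIREMENTS value
    raw = ("nvidia-cudnn-cu12==9.10.2.21; platform_system == 'Linux' | "
           "nvidia-nccl-cu12==2.27.5; platform_system == 'Linux'")

    for item in raw.split(" | "):
        req = Requirement(item)  # exposes the name, version specifier, and marker
        print(req.name, req.specifier, req.marker)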

View File

@ -63,7 +63,6 @@ jobs:
- name: Setup Python
uses: actions/setup-python@v6
with:
# TODO: Removeme once 3.14 is out
# .4 version is min minor for 3.10, and also no-gil version of 3.13 needs at least 3.13.3
python-version: "3.10.4"
freethreaded: false

View File

@ -59,7 +59,6 @@ jobs:
- name: Setup Python
uses: actions/setup-python@v6
with:
# TODO: Removeme once 3.14 is out
# .4 version is min minor for 3.10, and also no-gil version of 3.13 needs at least 3.13.3
python-version: "3.10.4"
freethreaded: false
@ -169,7 +168,6 @@ jobs:
- name: Setup Python
uses: actions/setup-python@v6
with:
# TODO: Removeme once 3.14 is out
# .4 version is min minor for 3.10, and also no-gil version of 3.13 needs at least 3.13.3
python-version: "3.11.4"
freethreaded: false
@ -279,7 +277,6 @@ jobs:
- name: Setup Python
uses: actions/setup-python@v6
with:
# TODO: Removeme once 3.14 is out
# .4 version is min minor for 3.10, and also no-gil version of 3.13 needs at least 3.13.3
python-version: "3.12.4"
freethreaded: false
@ -389,7 +386,6 @@ jobs:
- name: Setup Python
uses: actions/setup-python@v6
with:
# TODO: Removeme once 3.14 is out
# .4 version is min minor for 3.10, and also no-gil version of 3.13 needs at least 3.13.3
python-version: "3.13.4"
freethreaded: false
@ -499,7 +495,6 @@ jobs:
- name: Setup Python
uses: actions/setup-python@v6
with:
# TODO: Removeme once 3.14 is out
# .4 version is min minor for 3.10, and also no-gil version of 3.13 needs at least 3.13.3
python-version: "3.13.4"
freethreaded: true
@ -609,9 +604,8 @@ jobs:
- name: Setup Python
uses: actions/setup-python@v6
with:
# TODO: Removeme once 3.14 is out
# .4 version is min minor for 3.10, and also no-gil version of 3.13 needs at least 3.13.3
python-version: "3.14.0-rc.2"
python-version: "3.14.0"
freethreaded: false
- name: Checkout PyTorch
uses: actions/checkout@v4
@ -719,9 +713,8 @@ jobs:
- name: Setup Python
uses: actions/setup-python@v6
with:
# TODO: Removeme once 3.14 is out
# .4 version is min minor for 3.10, and also no-gil version of 3.13 needs at least 3.13.3
python-version: "3.14.0-rc.2"
python-version: "3.14.0"
freethreaded: true
- name: Checkout PyTorch
uses: actions/checkout@v4
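
The hunks above pin the final 3.14.0 release in place of 3.14.0-rc.2 and drop the now-stale TODO comments; the freethreaded: true variants ask actions/setup-python for CPython's free-threaded (no-GIL) build. A minimal sketch of how a test script can confirm which flavor it is running under; the attribute check is guarded because sys._is_gil_enabled only exists on 3.13 and newer:

    import sys
    import sysconfig

    # 1 on free-threaded builds, 0 (or None on older Pythons) otherwise
    print("Py_GIL_DISABLED:", sysconfig.get_config_var("Py_GIL_DISABLED"))

    # On 3.13+ this reports whether the GIL is actually active in this process
    if hasattr(sys, "_is_gil_enabled"):
        print("GIL enabled:", sys._is_gil_enabled())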

View File

@ -44,7 +44,7 @@ jobs:
libtorch-cpu-shared-with-deps-debug-build:
if: ${{ github.repository_owner == 'pytorch' }}
needs: get-label-type
runs-on: "${{ needs.get-label-type.outputs.label-type }}windows.4xlarge"
runs-on: "${{ needs.get-label-type.outputs.label-type }}windows.12xlarge"
timeout-minutes: 360
env:
PYTORCH_ROOT: ${{ github.workspace }}/pytorch
@ -291,7 +291,7 @@ jobs:
libtorch-cuda12_6-shared-with-deps-debug-build:
if: ${{ github.repository_owner == 'pytorch' }}
needs: get-label-type
runs-on: "${{ needs.get-label-type.outputs.label-type }}windows.4xlarge"
runs-on: "${{ needs.get-label-type.outputs.label-type }}windows.12xlarge"
timeout-minutes: 360
env:
PYTORCH_ROOT: ${{ github.workspace }}/pytorch
@ -541,7 +541,7 @@ jobs:
libtorch-cuda12_8-shared-with-deps-debug-build:
if: ${{ github.repository_owner == 'pytorch' }}
needs: get-label-type
runs-on: "${{ needs.get-label-type.outputs.label-type }}windows.4xlarge"
runs-on: "${{ needs.get-label-type.outputs.label-type }}windows.12xlarge"
timeout-minutes: 360
env:
PYTORCH_ROOT: ${{ github.workspace }}/pytorch
@ -788,260 +788,10 @@ jobs:
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
uses: ./.github/workflows/_binary-upload.yml
libtorch-cuda12_9-shared-with-deps-debug-build:
if: ${{ github.repository_owner == 'pytorch' }}
needs: get-label-type
runs-on: "${{ needs.get-label-type.outputs.label-type }}windows.4xlarge"
timeout-minutes: 360
env:
PYTORCH_ROOT: ${{ github.workspace }}/pytorch
PACKAGE_TYPE: libtorch
# TODO: This is a legacy variable that we eventually want to get rid of in
# favor of GPU_ARCH_VERSION
DESIRED_CUDA: cu129
GPU_ARCH_VERSION: "12.9"
GPU_ARCH_TYPE: cuda
SKIP_ALL_TESTS: 1
LIBTORCH_CONFIG: debug
LIBTORCH_VARIANT: shared-with-deps
# This is a dummy value for libtorch to work correctly with our batch scripts
# without this value pip does not get installed for some reason
DESIRED_PYTHON: "3.10"
steps:
# NOTE: These environment variables are put here so that they can be applied on every job equally
# They are also here because setting them at a workflow level doesn't give us access to the
# runner.temp variable, which we need.
- name: Populate binary env
shell: bash
run: |
echo "BINARY_ENV_FILE=${RUNNER_TEMP}/env" >> "${GITHUB_ENV}"
echo "PYTORCH_FINAL_PACKAGE_DIR=${RUNNER_TEMP}/artifacts" >> "${GITHUB_ENV}"
echo "WIN_PACKAGE_WORK_DIR=${RUNNER_TEMP}"
- name: Display EC2 information
shell: bash
run: |
set -euo pipefail
function get_ec2_metadata() {
# Pulled from instance metadata endpoint for EC2
# see https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instancedata-data-retrieval.html
category=$1
curl -H "X-aws-ec2-metadata-token: $(curl -s -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")" -fsSL "http://169.254.169.254/latest/meta-data/${category}"
}
echo "ami-id: $(get_ec2_metadata ami-id)"
echo "instance-id: $(get_ec2_metadata instance-id)"
echo "instance-type: $(get_ec2_metadata instance-type)"
echo "system info $(uname -a)"
- name: "[FB EMPLOYEES] Enable SSH (Click me for login details)"
uses: pytorch/test-infra/.github/actions/setup-ssh@main
continue-on-error: true
with:
github-secret: ${{ secrets.GITHUB_TOKEN }}
- name: Enable git long paths and symlinks on Windows and disable fsmonitor daemon
shell: bash
run: |
git config --global core.longpaths true
git config --global core.symlinks true
# https://git-scm.com/docs/git-fsmonitor--daemon. The daemon could lock
# the directory on Windows and prevent GHA from checking out as reported
# in https://github.com/actions/checkout/issues/1018
git config --global core.fsmonitor false
# Needed for binary builds, see: https://github.com/pytorch/pytorch/issues/73339#issuecomment-1058981560
- name: Enable long paths on Windows
shell: powershell
run: |
Set-ItemProperty -Path "HKLM:\\SYSTEM\CurrentControlSet\Control\FileSystem" -Name "LongPathsEnabled" -Value 1
# Since it's just a defensive command, the workflow should continue even if the command fails. This step can be
# removed once Windows Defender is removed from the AMI
- name: Disables Windows Defender scheduled and real-time scanning for files in directories used by PyTorch
continue-on-error: true
shell: powershell
run: |
Add-MpPreference -ExclusionPath $(Get-Location).tostring(),$Env:TEMP -ErrorAction Ignore
# Let's both exclude the path and disable Windows Defender completely just to be sure
# that it doesn't interfere
Set-MpPreference -DisableRealtimeMonitoring $True -ErrorAction Ignore
- name: Checkout PyTorch
uses: actions/checkout@v4
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
submodules: recursive
path: pytorch
show-progress: false
- name: Clean PyTorch checkout
run: |
# Remove any artifacts from the previous checkouts
git clean -fxd
working-directory: pytorch
- name: Populate binary env
shell: bash
run: |
"${PYTORCH_ROOT}/.circleci/scripts/binary_populate_env.sh"
- name: Build PyTorch binary
shell: bash
run: |
"${PYTORCH_ROOT}/.circleci/scripts/binary_windows_build.sh"
- uses: actions/upload-artifact@v4.4.0
if: always()
with:
name: libtorch-cuda12_9-shared-with-deps-debug
retention-days: 14
if-no-files-found: error
path: "${{ env.PYTORCH_FINAL_PACKAGE_DIR }}"
- name: Wait until all sessions have drained
shell: powershell
working-directory: pytorch
if: always()
timeout-minutes: 120
run: |
.github\scripts\wait_for_ssh_to_drain.ps1
- name: Kill active ssh sessions if still around (Useful if workflow was cancelled)
shell: powershell
working-directory: pytorch
if: always()
run: |
.github\scripts\kill_active_ssh_sessions.ps1
libtorch-cuda12_9-shared-with-deps-debug-test: # Testing
if: ${{ github.repository_owner == 'pytorch' }}
needs:
- libtorch-cuda12_9-shared-with-deps-debug-build
- get-label-type
runs-on: "${{ needs.get-label-type.outputs.label-type }}windows.g4dn.xlarge"
timeout-minutes: 360
env:
PYTORCH_ROOT: ${{ github.workspace }}/pytorch
PACKAGE_TYPE: libtorch
# TODO: This is a legacy variable that we eventually want to get rid of in
# favor of GPU_ARCH_VERSION
DESIRED_CUDA: cu129
GPU_ARCH_VERSION: "12.9"
GPU_ARCH_TYPE: cuda
SKIP_ALL_TESTS: 1
LIBTORCH_CONFIG: debug
LIBTORCH_VARIANT: shared-with-deps
# This is a dummy value for libtorch to work correctly with our batch scripts
# without this value pip does not get installed for some reason
DESIRED_PYTHON: "3.10"
steps:
- name: Display EC2 information
shell: bash
run: |
set -euo pipefail
function get_ec2_metadata() {
# Pulled from instance metadata endpoint for EC2
# see https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instancedata-data-retrieval.html
category=$1
curl -H "X-aws-ec2-metadata-token: $(curl -s -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")" -fsSL "http://169.254.169.254/latest/meta-data/${category}"
}
echo "ami-id: $(get_ec2_metadata ami-id)"
echo "instance-id: $(get_ec2_metadata instance-id)"
echo "instance-type: $(get_ec2_metadata instance-type)"
echo "system info $(uname -a)"
- name: "[FB EMPLOYEES] Enable SSH (Click me for login details)"
uses: pytorch/test-infra/.github/actions/setup-ssh@main
continue-on-error: true
with:
github-secret: ${{ secrets.GITHUB_TOKEN }}
- name: Enable git long paths and symlinks on Windows and disable fsmonitor daemon
shell: bash
run: |
git config --global core.longpaths true
git config --global core.symlinks true
# https://git-scm.com/docs/git-fsmonitor--daemon. The daemon could lock
# the directory on Windows and prevent GHA from checking out as reported
# in https://github.com/actions/checkout/issues/1018
git config --global core.fsmonitor false
# Needed for binary builds, see: https://github.com/pytorch/pytorch/issues/73339#issuecomment-1058981560
- name: Enable long paths on Windows
shell: powershell
run: |
Set-ItemProperty -Path "HKLM:\\SYSTEM\CurrentControlSet\Control\FileSystem" -Name "LongPathsEnabled" -Value 1
# Since it's just a defensive command, the workflow should continue even if the command fails. This step can be
# removed once Windows Defender is removed from the AMI
- name: Disables Windows Defender scheduled and real-time scanning for files in directories used by PyTorch
continue-on-error: true
shell: powershell
run: |
Add-MpPreference -ExclusionPath $(Get-Location).tostring(),$Env:TEMP -ErrorAction Ignore
# Let's both exclude the path and disable Windows Defender completely just to be sure
# that it doesn't interfere
Set-MpPreference -DisableRealtimeMonitoring $True -ErrorAction Ignore
- name: Checkout PyTorch
uses: actions/checkout@v4
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
submodules: recursive
path: pytorch
show-progress: false
- name: Clean PyTorch checkout
run: |
# Remove any artifacts from the previous checkouts
git clean -fxd
working-directory: pytorch
# NOTE: These environment variables are put here so that they can be applied on every job equally
# They are also here because setting them at a workflow level doesn't give us access to the
# runner.temp variable, which we need.
- name: Populate binary env
shell: bash
run: |
echo "BINARY_ENV_FILE=${RUNNER_TEMP}/env" >> "${GITHUB_ENV}"
echo "PYTORCH_FINAL_PACKAGE_DIR=${RUNNER_TEMP}/artifacts" >> "${GITHUB_ENV}"
echo "WIN_PACKAGE_WORK_DIR=${RUNNER_TEMP}"
- uses: actions/download-artifact@v4.1.7
name: Download Build Artifacts
with:
name: libtorch-cuda12_9-shared-with-deps-debug
path: "${{ env.PYTORCH_FINAL_PACKAGE_DIR }}"
- name: Populate binary env
shell: bash
run: |
"${PYTORCH_ROOT}/.circleci/scripts/binary_populate_env.sh"
- name: Test PyTorch binary
shell: bash
run: |
"${PYTORCH_ROOT}/.circleci/scripts/binary_windows_test.sh"
- name: Wait until all sessions have drained
shell: powershell
working-directory: pytorch
if: always()
timeout-minutes: 120
run: |
.github\scripts\wait_for_ssh_to_drain.ps1
- name: Kill active ssh sessions if still around (Useful if workflow was cancelled)
shell: powershell
working-directory: pytorch
if: always()
run: |
.github\scripts\kill_active_ssh_sessions.ps1
libtorch-cuda12_9-shared-with-deps-debug-upload: # Uploading
if: ${{ github.repository_owner == 'pytorch' }}
permissions:
id-token: write
contents: read
needs: libtorch-cuda12_9-shared-with-deps-debug-test
with:
PYTORCH_ROOT: ${{ github.workspace }}/pytorch
PACKAGE_TYPE: libtorch
# TODO: This is a legacy variable that we eventually want to get rid of in
# favor of GPU_ARCH_VERSION
DESIRED_CUDA: cu129
GPU_ARCH_VERSION: "12.9"
GPU_ARCH_TYPE: cuda
LIBTORCH_CONFIG: debug
LIBTORCH_VARIANT: shared-with-deps
# This is a dummy value for libtorch to work correctly with our batch scripts
# without this value pip does not get installed for some reason
DESIRED_PYTHON: "3.10"
build_name: libtorch-cuda12_9-shared-with-deps-debug
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
uses: ./.github/workflows/_binary-upload.yml
libtorch-cuda13_0-shared-with-deps-debug-build:
if: ${{ github.repository_owner == 'pytorch' }}
needs: get-label-type
runs-on: "${{ needs.get-label-type.outputs.label-type }}windows.4xlarge"
runs-on: "${{ needs.get-label-type.outputs.label-type }}windows.12xlarge"
timeout-minutes: 360
env:
PYTORCH_ROOT: ${{ github.workspace }}/pytorch

View File

@ -44,7 +44,7 @@ jobs:
libtorch-cpu-shared-with-deps-release-build:
if: ${{ github.repository_owner == 'pytorch' }}
needs: get-label-type
runs-on: "${{ needs.get-label-type.outputs.label-type }}windows.4xlarge"
runs-on: "${{ needs.get-label-type.outputs.label-type }}windows.12xlarge"
timeout-minutes: 360
env:
PYTORCH_ROOT: ${{ github.workspace }}/pytorch
@ -291,7 +291,7 @@ jobs:
libtorch-cuda12_6-shared-with-deps-release-build:
if: ${{ github.repository_owner == 'pytorch' }}
needs: get-label-type
runs-on: "${{ needs.get-label-type.outputs.label-type }}windows.4xlarge"
runs-on: "${{ needs.get-label-type.outputs.label-type }}windows.12xlarge"
timeout-minutes: 360
env:
PYTORCH_ROOT: ${{ github.workspace }}/pytorch
@ -541,7 +541,7 @@ jobs:
libtorch-cuda12_8-shared-with-deps-release-build:
if: ${{ github.repository_owner == 'pytorch' }}
needs: get-label-type
runs-on: "${{ needs.get-label-type.outputs.label-type }}windows.4xlarge"
runs-on: "${{ needs.get-label-type.outputs.label-type }}windows.12xlarge"
timeout-minutes: 360
env:
PYTORCH_ROOT: ${{ github.workspace }}/pytorch
@ -788,260 +788,10 @@ jobs:
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
uses: ./.github/workflows/_binary-upload.yml
libtorch-cuda12_9-shared-with-deps-release-build:
if: ${{ github.repository_owner == 'pytorch' }}
needs: get-label-type
runs-on: "${{ needs.get-label-type.outputs.label-type }}windows.4xlarge"
timeout-minutes: 360
env:
PYTORCH_ROOT: ${{ github.workspace }}/pytorch
PACKAGE_TYPE: libtorch
# TODO: This is a legacy variable that we eventually want to get rid of in
# favor of GPU_ARCH_VERSION
DESIRED_CUDA: cu129
GPU_ARCH_VERSION: "12.9"
GPU_ARCH_TYPE: cuda
SKIP_ALL_TESTS: 1
LIBTORCH_CONFIG: release
LIBTORCH_VARIANT: shared-with-deps
# This is a dummy value for libtorch to work correctly with our batch scripts
# without this value pip does not get installed for some reason
DESIRED_PYTHON: "3.10"
steps:
# NOTE: These environment variables are put here so that they can be applied on every job equally
# They are also here because setting them at a workflow level doesn't give us access to the
# runner.temp variable, which we need.
- name: Populate binary env
shell: bash
run: |
echo "BINARY_ENV_FILE=${RUNNER_TEMP}/env" >> "${GITHUB_ENV}"
echo "PYTORCH_FINAL_PACKAGE_DIR=${RUNNER_TEMP}/artifacts" >> "${GITHUB_ENV}"
echo "WIN_PACKAGE_WORK_DIR=${RUNNER_TEMP}"
- name: Display EC2 information
shell: bash
run: |
set -euo pipefail
function get_ec2_metadata() {
# Pulled from instance metadata endpoint for EC2
# see https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instancedata-data-retrieval.html
category=$1
curl -H "X-aws-ec2-metadata-token: $(curl -s -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")" -fsSL "http://169.254.169.254/latest/meta-data/${category}"
}
echo "ami-id: $(get_ec2_metadata ami-id)"
echo "instance-id: $(get_ec2_metadata instance-id)"
echo "instance-type: $(get_ec2_metadata instance-type)"
echo "system info $(uname -a)"
- name: "[FB EMPLOYEES] Enable SSH (Click me for login details)"
uses: pytorch/test-infra/.github/actions/setup-ssh@main
continue-on-error: true
with:
github-secret: ${{ secrets.GITHUB_TOKEN }}
- name: Enable git long paths and symlinks on Windows and disable fsmonitor daemon
shell: bash
run: |
git config --global core.longpaths true
git config --global core.symlinks true
# https://git-scm.com/docs/git-fsmonitor--daemon. The daemon could lock
# the directory on Windows and prevent GHA from checking out as reported
# in https://github.com/actions/checkout/issues/1018
git config --global core.fsmonitor false
# Needed for binary builds, see: https://github.com/pytorch/pytorch/issues/73339#issuecomment-1058981560
- name: Enable long paths on Windows
shell: powershell
run: |
Set-ItemProperty -Path "HKLM:\\SYSTEM\CurrentControlSet\Control\FileSystem" -Name "LongPathsEnabled" -Value 1
# Since it's just a defensive command, the workflow should continue even if the command fails. This step can be
# removed once Windows Defender is removed from the AMI
- name: Disables Windows Defender scheduled and real-time scanning for files in directories used by PyTorch
continue-on-error: true
shell: powershell
run: |
Add-MpPreference -ExclusionPath $(Get-Location).tostring(),$Env:TEMP -ErrorAction Ignore
# Let's both exclude the path and disable Windows Defender completely just to be sure
# that it doesn't interfere
Set-MpPreference -DisableRealtimeMonitoring $True -ErrorAction Ignore
- name: Checkout PyTorch
uses: actions/checkout@v4
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
submodules: recursive
path: pytorch
show-progress: false
- name: Clean PyTorch checkout
run: |
# Remove any artifacts from the previous checkouts
git clean -fxd
working-directory: pytorch
- name: Populate binary env
shell: bash
run: |
"${PYTORCH_ROOT}/.circleci/scripts/binary_populate_env.sh"
- name: Build PyTorch binary
shell: bash
run: |
"${PYTORCH_ROOT}/.circleci/scripts/binary_windows_build.sh"
- uses: actions/upload-artifact@v4.4.0
if: always()
with:
name: libtorch-cuda12_9-shared-with-deps-release
retention-days: 14
if-no-files-found: error
path: "${{ env.PYTORCH_FINAL_PACKAGE_DIR }}"
- name: Wait until all sessions have drained
shell: powershell
working-directory: pytorch
if: always()
timeout-minutes: 120
run: |
.github\scripts\wait_for_ssh_to_drain.ps1
- name: Kill active ssh sessions if still around (Useful if workflow was cancelled)
shell: powershell
working-directory: pytorch
if: always()
run: |
.github\scripts\kill_active_ssh_sessions.ps1
libtorch-cuda12_9-shared-with-deps-release-test: # Testing
if: ${{ github.repository_owner == 'pytorch' }}
needs:
- libtorch-cuda12_9-shared-with-deps-release-build
- get-label-type
runs-on: "${{ needs.get-label-type.outputs.label-type }}windows.g4dn.xlarge"
timeout-minutes: 360
env:
PYTORCH_ROOT: ${{ github.workspace }}/pytorch
PACKAGE_TYPE: libtorch
# TODO: This is a legacy variable that we eventually want to get rid of in
# favor of GPU_ARCH_VERSION
DESIRED_CUDA: cu129
GPU_ARCH_VERSION: "12.9"
GPU_ARCH_TYPE: cuda
SKIP_ALL_TESTS: 1
LIBTORCH_CONFIG: release
LIBTORCH_VARIANT: shared-with-deps
# This is a dummy value for libtorch to work correctly with our batch scripts
# without this value pip does not get installed for some reason
DESIRED_PYTHON: "3.10"
steps:
- name: Display EC2 information
shell: bash
run: |
set -euo pipefail
function get_ec2_metadata() {
# Pulled from instance metadata endpoint for EC2
# see https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instancedata-data-retrieval.html
category=$1
curl -H "X-aws-ec2-metadata-token: $(curl -s -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")" -fsSL "http://169.254.169.254/latest/meta-data/${category}"
}
echo "ami-id: $(get_ec2_metadata ami-id)"
echo "instance-id: $(get_ec2_metadata instance-id)"
echo "instance-type: $(get_ec2_metadata instance-type)"
echo "system info $(uname -a)"
- name: "[FB EMPLOYEES] Enable SSH (Click me for login details)"
uses: pytorch/test-infra/.github/actions/setup-ssh@main
continue-on-error: true
with:
github-secret: ${{ secrets.GITHUB_TOKEN }}
- name: Enable git long paths and symlinks on Windows and disable fsmonitor daemon
shell: bash
run: |
git config --global core.longpaths true
git config --global core.symlinks true
# https://git-scm.com/docs/git-fsmonitor--daemon. The daemon could lock
# the directory on Windows and prevent GHA from checking out as reported
# in https://github.com/actions/checkout/issues/1018
git config --global core.fsmonitor false
# Needed for binary builds, see: https://github.com/pytorch/pytorch/issues/73339#issuecomment-1058981560
- name: Enable long paths on Windows
shell: powershell
run: |
Set-ItemProperty -Path "HKLM:\\SYSTEM\CurrentControlSet\Control\FileSystem" -Name "LongPathsEnabled" -Value 1
# Since it's just a defensive command, the workflow should continue even if the command fails. This step can be
# removed once Windows Defender is removed from the AMI
- name: Disables Windows Defender scheduled and real-time scanning for files in directories used by PyTorch
continue-on-error: true
shell: powershell
run: |
Add-MpPreference -ExclusionPath $(Get-Location).tostring(),$Env:TEMP -ErrorAction Ignore
# Let's both exclude the path and disable Windows Defender completely just to be sure
# that it doesn't interfere
Set-MpPreference -DisableRealtimeMonitoring $True -ErrorAction Ignore
- name: Checkout PyTorch
uses: actions/checkout@v4
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
submodules: recursive
path: pytorch
show-progress: false
- name: Clean PyTorch checkout
run: |
# Remove any artifacts from the previous checkouts
git clean -fxd
working-directory: pytorch
# NOTE: These environment variables are put here so that they can be applied on every job equally
# They are also here because setting them at a workflow level doesn't give us access to the
# runner.temp variable, which we need.
- name: Populate binary env
shell: bash
run: |
echo "BINARY_ENV_FILE=${RUNNER_TEMP}/env" >> "${GITHUB_ENV}"
echo "PYTORCH_FINAL_PACKAGE_DIR=${RUNNER_TEMP}/artifacts" >> "${GITHUB_ENV}"
echo "WIN_PACKAGE_WORK_DIR=${RUNNER_TEMP}"
- uses: actions/download-artifact@v4.1.7
name: Download Build Artifacts
with:
name: libtorch-cuda12_9-shared-with-deps-release
path: "${{ env.PYTORCH_FINAL_PACKAGE_DIR }}"
- name: Populate binary env
shell: bash
run: |
"${PYTORCH_ROOT}/.circleci/scripts/binary_populate_env.sh"
- name: Test PyTorch binary
shell: bash
run: |
"${PYTORCH_ROOT}/.circleci/scripts/binary_windows_test.sh"
- name: Wait until all sessions have drained
shell: powershell
working-directory: pytorch
if: always()
timeout-minutes: 120
run: |
.github\scripts\wait_for_ssh_to_drain.ps1
- name: Kill active ssh sessions if still around (Useful if workflow was cancelled)
shell: powershell
working-directory: pytorch
if: always()
run: |
.github\scripts\kill_active_ssh_sessions.ps1
libtorch-cuda12_9-shared-with-deps-release-upload: # Uploading
if: ${{ github.repository_owner == 'pytorch' }}
permissions:
id-token: write
contents: read
needs: libtorch-cuda12_9-shared-with-deps-release-test
with:
PYTORCH_ROOT: ${{ github.workspace }}/pytorch
PACKAGE_TYPE: libtorch
# TODO: This is a legacy variable that we eventually want to get rid of in
# favor of GPU_ARCH_VERSION
DESIRED_CUDA: cu129
GPU_ARCH_VERSION: "12.9"
GPU_ARCH_TYPE: cuda
LIBTORCH_CONFIG: release
LIBTORCH_VARIANT: shared-with-deps
# This is a dummy value for libtorch to work correctly with our batch scripts
# without this value pip does not get installed for some reason
DESIRED_PYTHON: "3.10"
build_name: libtorch-cuda12_9-shared-with-deps-release
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
uses: ./.github/workflows/_binary-upload.yml
libtorch-cuda13_0-shared-with-deps-release-build:
if: ${{ github.repository_owner == 'pytorch' }}
needs: get-label-type
runs-on: "${{ needs.get-label-type.outputs.label-type }}windows.4xlarge"
runs-on: "${{ needs.get-label-type.outputs.label-type }}windows.12xlarge"
timeout-minutes: 360
env:
PYTORCH_ROOT: ${{ github.workspace }}/pytorch

File diff suppressed because it is too large

View File

@ -0,0 +1,132 @@
name: inductor-perf-nightly-rocm-mi300
on:
push:
tags:
- ciflow/inductor-perf-test-nightly-rocm-mi300/*
schedule:
- cron: 15 0 * * *
# NB: GitHub has an upper limit of 10 inputs here, so before we can sort it
# out, let's try to run torchao cudagraphs_low_precision as part of cudagraphs
workflow_dispatch:
inputs:
training:
description: Run training (on by default)?
required: false
type: boolean
default: true
inference:
description: Run inference (on by default)?
required: false
type: boolean
default: true
default:
description: Run inductor_default?
required: false
type: boolean
default: false
dynamic:
description: Run inductor_dynamic_shapes?
required: false
type: boolean
default: false
cppwrapper:
description: Run inductor_cpp_wrapper?
required: false
type: boolean
default: false
cudagraphs:
description: Run inductor_cudagraphs?
required: false
type: boolean
default: true
freezing_cudagraphs:
description: Run inductor_cudagraphs with freezing for inference?
required: false
type: boolean
default: false
aotinductor:
description: Run aot_inductor for inference?
required: false
type: boolean
default: false
maxautotune:
description: Run inductor_max_autotune?
required: false
type: boolean
default: false
benchmark_configs:
description: The list of configs used by the benchmark
required: false
type: string
default: inductor_huggingface_perf_rocm_mi300,inductor_timm_perf_rocm_mi300,inductor_torchbench_perf_rocm_mi300
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref_name }}-${{ github.ref_type == 'branch' && github.sha }}-${{ github.event_name == 'workflow_dispatch' }}-${{ github.event_name == 'schedule' }}
cancel-in-progress: true
permissions: read-all
jobs:
get-label-type:
name: get-label-type
uses: pytorch/pytorch/.github/workflows/_runner-determinator.yml@main
if: ${{ (github.event_name != 'schedule' || github.repository == 'pytorch/pytorch') && github.repository_owner == 'pytorch' }}
with:
triggering_actor: ${{ github.triggering_actor }}
issue_owner: ${{ github.event.pull_request.user.login || github.event.issue.user.login }}
curr_branch: ${{ github.head_ref || github.ref_name }}
curr_ref_type: ${{ github.ref_type }}
opt_out_experiments: lf
linux-jammy-rocm-py3_10-inductor-benchmark-build:
if: github.repository_owner == 'pytorch'
name: rocm-py3_10-inductor-benchmark-build
uses: ./.github/workflows/_linux-build.yml
with:
build-environment: linux-jammy-rocm-py3_10
docker-image-name: ci-image:pytorch-linux-jammy-rocm-n-py3-benchmarks
test-matrix: |
{ include: [
{ config: "inductor_huggingface_perf_rocm_mi300", shard: 1, num_shards: 5, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_huggingface_perf_rocm_mi300", shard: 2, num_shards: 5, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_huggingface_perf_rocm_mi300", shard: 3, num_shards: 5, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_huggingface_perf_rocm_mi300", shard: 4, num_shards: 5, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_huggingface_perf_rocm_mi300", shard: 5, num_shards: 5, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_timm_perf_rocm_mi300", shard: 1, num_shards: 7, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_timm_perf_rocm_mi300", shard: 2, num_shards: 7, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_timm_perf_rocm_mi300", shard: 3, num_shards: 7, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_timm_perf_rocm_mi300", shard: 4, num_shards: 7, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_timm_perf_rocm_mi300", shard: 5, num_shards: 7, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_timm_perf_rocm_mi300", shard: 6, num_shards: 7, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_timm_perf_rocm_mi300", shard: 7, num_shards: 7, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_torchbench_perf_rocm_mi300", shard: 1, num_shards: 9, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_torchbench_perf_rocm_mi300", shard: 2, num_shards: 9, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_torchbench_perf_rocm_mi300", shard: 3, num_shards: 9, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_torchbench_perf_rocm_mi300", shard: 4, num_shards: 9, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_torchbench_perf_rocm_mi300", shard: 5, num_shards: 9, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_torchbench_perf_rocm_mi300", shard: 6, num_shards: 9, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_torchbench_perf_rocm_mi300", shard: 7, num_shards: 9, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_torchbench_perf_rocm_mi300", shard: 8, num_shards: 9, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_torchbench_perf_rocm_mi300", shard: 9, num_shards: 9, runner: "linux.rocm.gpu.gfx942.1" },
]}
secrets: inherit
linux-jammy-rocm-py3_10-inductor-benchmark-test:
permissions:
id-token: write
contents: read
name: rocm-py3_10-inductor-benchmark-test
uses: ./.github/workflows/_rocm-test.yml
needs: linux-jammy-rocm-py3_10-inductor-benchmark-build
with:
build-environment: linux-jammy-rocm-py3_10
dashboard-tag: training-true-inference-true-default-true-dynamic-true-cudagraphs-true-cppwrapper-true-aotinductor-true-freezing_cudagraphs-true-cudagraphs_low_precision-true
docker-image: ${{ needs.linux-jammy-rocm-py3_10-inductor-benchmark-build.outputs.docker-image }}
test-matrix: ${{ needs.linux-jammy-rocm-py3_10-inductor-benchmark-build.outputs.test-matrix }}
timeout-minutes: 720
# Disable monitor in perf tests pending further investigation
disable-monitor: true
monitor-log-interval: 10
monitor-data-collect-interval: 2
secrets: inherit

View File

@ -1,11 +1,11 @@
name: inductor-perf-nightly-rocm
name: inductor-perf-nightly-rocm-mi355
on:
push:
tags:
- ciflow/inductor-perf-test-nightly-rocm/*
- ciflow/inductor-perf-test-nightly-rocm-mi355/*
schedule:
- cron: 0 7 * * 0,3
- cron: 15 0 * * *
# NB: GitHub has an upper limit of 10 inputs here, so before we can sort it
# out, let's try to run torchao cudagraphs_low_precision as part of cudagraphs
workflow_dispatch:
@ -59,7 +59,7 @@ on:
description: The list of configs used by the benchmark
required: false
type: string
default: inductor_huggingface_perf_rocm,inductor_timm_perf_rocm,inductor_torchbench_perf_rocm
default: inductor_huggingface_perf_rocm_mi355,inductor_timm_perf_rocm_mi355,inductor_torchbench_perf_rocm_mi355
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref_name }}-${{ github.ref_type == 'branch' && github.sha }}-${{ github.event_name == 'workflow_dispatch' }}-${{ github.event_name == 'schedule' }}
@ -88,23 +88,27 @@ jobs:
docker-image-name: ci-image:pytorch-linux-jammy-rocm-n-py3-benchmarks
test-matrix: |
{ include: [
{ config: "inductor_huggingface_perf_rocm", shard: 1, num_shards: 4, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_huggingface_perf_rocm", shard: 2, num_shards: 4, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_huggingface_perf_rocm", shard: 3, num_shards: 4, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_huggingface_perf_rocm", shard: 4, num_shards: 4, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_timm_perf_rocm", shard: 1, num_shards: 5, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_timm_perf_rocm", shard: 2, num_shards: 5, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_timm_perf_rocm", shard: 3, num_shards: 5, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_timm_perf_rocm", shard: 4, num_shards: 5, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_timm_perf_rocm", shard: 5, num_shards: 5, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_torchbench_perf_rocm", shard: 1, num_shards: 8, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_torchbench_perf_rocm", shard: 2, num_shards: 8, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_torchbench_perf_rocm", shard: 3, num_shards: 8, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_torchbench_perf_rocm", shard: 4, num_shards: 8, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_torchbench_perf_rocm", shard: 5, num_shards: 8, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_torchbench_perf_rocm", shard: 6, num_shards: 8, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_torchbench_perf_rocm", shard: 7, num_shards: 8, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_torchbench_perf_rocm", shard: 8, num_shards: 8, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "inductor_huggingface_perf_rocm_mi355", shard: 1, num_shards: 5, runner: "linux.rocm.gpu.mi355.1" },
{ config: "inductor_huggingface_perf_rocm_mi355", shard: 2, num_shards: 5, runner: "linux.rocm.gpu.mi355.1" },
{ config: "inductor_huggingface_perf_rocm_mi355", shard: 3, num_shards: 5, runner: "linux.rocm.gpu.mi355.1" },
{ config: "inductor_huggingface_perf_rocm_mi355", shard: 4, num_shards: 5, runner: "linux.rocm.gpu.mi355.1" },
{ config: "inductor_huggingface_perf_rocm_mi355", shard: 5, num_shards: 5, runner: "linux.rocm.gpu.mi355.1" },
{ config: "inductor_timm_perf_rocm_mi355", shard: 1, num_shards: 7, runner: "linux.rocm.gpu.mi355.1" },
{ config: "inductor_timm_perf_rocm_mi355", shard: 2, num_shards: 7, runner: "linux.rocm.gpu.mi355.1" },
{ config: "inductor_timm_perf_rocm_mi355", shard: 3, num_shards: 7, runner: "linux.rocm.gpu.mi355.1" },
{ config: "inductor_timm_perf_rocm_mi355", shard: 4, num_shards: 7, runner: "linux.rocm.gpu.mi355.1" },
{ config: "inductor_timm_perf_rocm_mi355", shard: 5, num_shards: 7, runner: "linux.rocm.gpu.mi355.1" },
{ config: "inductor_timm_perf_rocm_mi355", shard: 6, num_shards: 7, runner: "linux.rocm.gpu.mi355.1" },
{ config: "inductor_timm_perf_rocm_mi355", shard: 7, num_shards: 7, runner: "linux.rocm.gpu.mi355.1" },
{ config: "inductor_torchbench_perf_rocm_mi355", shard: 1, num_shards: 9, runner: "linux.rocm.gpu.mi355.1" },
{ config: "inductor_torchbench_perf_rocm_mi355", shard: 2, num_shards: 9, runner: "linux.rocm.gpu.mi355.1" },
{ config: "inductor_torchbench_perf_rocm_mi355", shard: 3, num_shards: 9, runner: "linux.rocm.gpu.mi355.1" },
{ config: "inductor_torchbench_perf_rocm_mi355", shard: 4, num_shards: 9, runner: "linux.rocm.gpu.mi355.1" },
{ config: "inductor_torchbench_perf_rocm_mi355", shard: 5, num_shards: 9, runner: "linux.rocm.gpu.mi355.1" },
{ config: "inductor_torchbench_perf_rocm_mi355", shard: 6, num_shards: 9, runner: "linux.rocm.gpu.mi355.1" },
{ config: "inductor_torchbench_perf_rocm_mi355", shard: 7, num_shards: 9, runner: "linux.rocm.gpu.mi355.1" },
{ config: "inductor_torchbench_perf_rocm_mi355", shard: 8, num_shards: 9, runner: "linux.rocm.gpu.mi355.1" },
{ config: "inductor_torchbench_perf_rocm_mi355", shard: 9, num_shards: 9, runner: "linux.rocm.gpu.mi355.1" },
]}
secrets: inherit

View File

@ -88,7 +88,6 @@ jobs:
with:
build-environment: linux-jammy-rocm-py3_10
docker-image-name: ci-image:pytorch-linux-jammy-rocm-n-py3-benchmarks
sync-tag: rocm-build
test-matrix: |
{ include: [
{ config: "dynamo_eager_torchbench", shard: 1, num_shards: 2, runner: "linux.rocm.gpu.gfx942.1" },

View File

@ -118,9 +118,9 @@ jobs:
CHANGED_FILES="${{ needs.get-changed-files.outputs.changed-files }}"
echo "Running all other linters"
if [ "$CHANGED_FILES" = '*' ]; then
ADDITIONAL_LINTRUNNER_ARGS="--skip CLANGTIDY,CLANGFORMAT,MYPY,MYPYSTRICT --all-files" .github/scripts/lintrunner.sh
ADDITIONAL_LINTRUNNER_ARGS="--skip CLANGTIDY,CLANGFORMAT,MYPY,MYPYSTRICT,PYREFLY --all-files" .github/scripts/lintrunner.sh
else
ADDITIONAL_LINTRUNNER_ARGS="--skip CLANGTIDY,CLANGFORMAT,MYPY,MYPYSTRICT ${CHANGED_FILES}" .github/scripts/lintrunner.sh
ADDITIONAL_LINTRUNNER_ARGS="--skip CLANGTIDY,CLANGFORMAT,MYPY,MYPYSTRICT,PYREFLY ${CHANGED_FILES}" .github/scripts/lintrunner.sh
fi
quick-checks:

View File

@ -7,9 +7,11 @@ on:
workflow_dispatch:
inputs:
test_mode:
required: false
type: string
default: 'short'
type: choice
options:
- 'short'
- 'long'
- 'all'
description: Tag filter for operator benchmarks; options: short, long, all
schedule:
# Run at 07:00 UTC every Sunday
@ -28,38 +30,49 @@ permissions:
contents: read
jobs:
opbenchmark-build:
x86-opbenchmark-build:
if: github.repository_owner == 'pytorch'
name: opbenchmark-build
name: x86-opbenchmark-build
uses: ./.github/workflows/_linux-build.yml
with:
build-environment: linux-jammy-py3.10-gcc11-build
docker-image-name: ci-image:pytorch-linux-jammy-py3-gcc11-inductor-benchmarks
test-matrix: |
{ include: [
{ config: "cpu_operator_benchmark_short", shard: 1, num_shards: 1, runner: "linux.12xlarge" },
{ config: "cpu_operator_benchmark_${{ inputs.test_mode || 'short' }}", shard: 1, num_shards: 1, runner: "linux.12xlarge" },
]}
secrets: inherit
opbenchmark-on-demand-build:
if: ${{ github.event_name == 'workflow_dispatch' && github.repository_owner == 'pytorch' }}
name: opbenchmark-on-demand-build
uses: ./.github/workflows/_linux-build.yml
with:
build-environment: linux-jammy-py3.10-gcc11-build
docker-image-name: ci-image:pytorch-linux-jammy-py3-gcc11-inductor-benchmarks
test-matrix: |
{ include: [
{ config: "cpu_operator_benchmark_${{ inputs.test_mode }}", shard: 1, num_shards: 1, runner: "linux.12xlarge" },
]}
secrets: inherit
opbenchmark-test:
name: opbenchmark-test
x86-opbenchmark-test:
name: x86-opbenchmark-test
uses: ./.github/workflows/_linux-test.yml
needs: opbenchmark-build
needs: x86-opbenchmark-build
with:
build-environment: linux-jammy-py3.10-gcc11-build
docker-image: ${{ needs.opbenchmark-build.outputs.docker-image }}
test-matrix: ${{ needs.opbenchmark-build.outputs.test-matrix }}
docker-image: ${{ needs.x86-opbenchmark-build.outputs.docker-image }}
test-matrix: ${{ needs.x86-opbenchmark-build.outputs.test-matrix }}
secrets: inherit
aarch64-opbenchmark-build:
if: github.repository_owner == 'pytorch'
name: aarch64-opbenchmark-build
uses: ./.github/workflows/_linux-build.yml
with:
build-environment: linux-jammy-aarch64-py3.10
runner: linux.arm64.m7g.4xlarge
docker-image-name: ci-image:pytorch-linux-jammy-aarch64-py3.10-gcc11
test-matrix: |
{ include: [
{ config: "cpu_operator_benchmark_short", shard: 1, num_shards: 1, runner: "linux.arm64.m8g.4xlarge" },
]}
secrets: inherit
aarch64-opbenchmark-test:
name: aarch64-opbenchmark-test
uses: ./.github/workflows/_linux-test.yml
needs: aarch64-opbenchmark-build
with:
build-environment: linux-jammy-aarch64-py3.10
docker-image: ${{ needs.aarch64-opbenchmark-build.outputs.docker-image }}
test-matrix: ${{ needs.aarch64-opbenchmark-build.outputs.test-matrix }}
secrets: inherit

View File

@ -147,15 +147,16 @@ jobs:
runner_prefix: "${{ needs.get-label-type.outputs.label-type }}"
build-environment: linux-jammy-cuda12.8-py3.10-gcc9-debug
docker-image-name: ci-image:pytorch-linux-jammy-cuda12.8-cudnn9-py3-gcc9
cuda-arch-list: 8.9
test-matrix: |
{ include: [
{ config: "default", shard: 1, num_shards: 7, runner: "${{ needs.get-label-type.outputs.label-type }}linux.4xlarge.nvidia.gpu", owners: ["oncall:debug-build"] },
{ config: "default", shard: 2, num_shards: 7, runner: "${{ needs.get-label-type.outputs.label-type }}linux.4xlarge.nvidia.gpu", owners: ["oncall:debug-build"] },
{ config: "default", shard: 3, num_shards: 7, runner: "${{ needs.get-label-type.outputs.label-type }}linux.4xlarge.nvidia.gpu", owners: ["oncall:debug-build"] },
{ config: "default", shard: 4, num_shards: 7, runner: "${{ needs.get-label-type.outputs.label-type }}linux.4xlarge.nvidia.gpu", owners: ["oncall:debug-build"] },
{ config: "default", shard: 5, num_shards: 7, runner: "${{ needs.get-label-type.outputs.label-type }}linux.4xlarge.nvidia.gpu", owners: ["oncall:debug-build"] },
{ config: "default", shard: 6, num_shards: 7, runner: "${{ needs.get-label-type.outputs.label-type }}linux.4xlarge.nvidia.gpu", owners: ["oncall:debug-build"] },
{ config: "default", shard: 7, num_shards: 7, runner: "${{ needs.get-label-type.outputs.label-type }}linux.4xlarge.nvidia.gpu", owners: ["oncall:debug-build"] },
{ config: "default", shard: 1, num_shards: 7, runner: "${{ needs.get-label-type.outputs.label-type }}linux.g6.4xlarge.experimental.nvidia.gpu", owners: ["oncall:debug-build"] },
{ config: "default", shard: 2, num_shards: 7, runner: "${{ needs.get-label-type.outputs.label-type }}linux.g6.4xlarge.experimental.nvidia.gpu", owners: ["oncall:debug-build"] },
{ config: "default", shard: 3, num_shards: 7, runner: "${{ needs.get-label-type.outputs.label-type }}linux.g6.4xlarge.experimental.nvidia.gpu", owners: ["oncall:debug-build"] },
{ config: "default", shard: 4, num_shards: 7, runner: "${{ needs.get-label-type.outputs.label-type }}linux.g6.4xlarge.experimental.nvidia.gpu", owners: ["oncall:debug-build"] },
{ config: "default", shard: 5, num_shards: 7, runner: "${{ needs.get-label-type.outputs.label-type }}linux.g6.4xlarge.experimental.nvidia.gpu", owners: ["oncall:debug-build"] },
{ config: "default", shard: 6, num_shards: 7, runner: "${{ needs.get-label-type.outputs.label-type }}linux.g6.4xlarge.experimental.nvidia.gpu", owners: ["oncall:debug-build"] },
{ config: "default", shard: 7, num_shards: 7, runner: "${{ needs.get-label-type.outputs.label-type }}linux.g6.4xlarge.experimental.nvidia.gpu", owners: ["oncall:debug-build"] },
]}
secrets: inherit

View File

@ -347,7 +347,8 @@ jobs:
uses: ./.github/workflows/_linux-build.yml
needs: get-label-type
with:
sync-tag: linux-xpu-n-build
# This should sync with the build in xpu.yml but xpu uses a larger runner
# sync-tag: linux-xpu-n-build
runner_prefix: ${{ needs.get-label-type.outputs.label-type }}
build-environment: linux-jammy-xpu-n-py3.10
docker-image-name: ci-image:pytorch-linux-jammy-xpu-n-py3

View File

@ -45,7 +45,6 @@ jobs:
runner_prefix: "${{ needs.get-label-type.outputs.label-type }}"
build-environment: linux-noble-rocm-py3.12-mi300
docker-image-name: ci-image:pytorch-linux-noble-rocm-n-py3
sync-tag: rocm-build
test-matrix: |
{ include: [
{ config: "default", shard: 1, num_shards: 6, runner: "linux.rocm.gpu.gfx942.1" },

View File

@ -42,15 +42,14 @@ jobs:
runner_prefix: "${{ needs.get-label-type.outputs.label-type }}"
build-environment: linux-noble-rocm-py3.12-mi355
docker-image-name: ci-image:pytorch-linux-noble-rocm-n-py3
sync-tag: rocm-build
test-matrix: |
{ include: [
{ config: "default", shard: 1, num_shards: 6, runner: "linux.rocm.gpu.mi355.2" },
{ config: "default", shard: 2, num_shards: 6, runner: "linux.rocm.gpu.mi355.2" },
{ config: "default", shard: 3, num_shards: 6, runner: "linux.rocm.gpu.mi355.2" },
{ config: "default", shard: 4, num_shards: 6, runner: "linux.rocm.gpu.mi355.2" },
{ config: "default", shard: 5, num_shards: 6, runner: "linux.rocm.gpu.mi355.2" },
{ config: "default", shard: 6, num_shards: 6, runner: "linux.rocm.gpu.mi355.2" },
{ config: "default", shard: 1, num_shards: 6, runner: "linux.rocm.gpu.mi355.1" },
{ config: "default", shard: 2, num_shards: 6, runner: "linux.rocm.gpu.mi355.1" },
{ config: "default", shard: 3, num_shards: 6, runner: "linux.rocm.gpu.mi355.1" },
{ config: "default", shard: 4, num_shards: 6, runner: "linux.rocm.gpu.mi355.1" },
{ config: "default", shard: 5, num_shards: 6, runner: "linux.rocm.gpu.mi355.1" },
{ config: "default", shard: 6, num_shards: 6, runner: "linux.rocm.gpu.mi355.1" },
]}
secrets: inherit

.github/workflows/rocm-navi31.yml vendored Normal file
View File

@ -0,0 +1,75 @@
name: rocm-navi31
on:
push:
tags:
- ciflow/rocm-navi31/*
workflow_dispatch:
schedule:
# We have several schedules so jobs can check github.event.schedule to activate only for a fraction of the runs.
# Also run less frequently on weekends.
- cron: 45 */2 * * 1-5
- cron: 45 4,12 * * 0,6
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref_name }}-${{ github.ref_type == 'branch' && github.sha }}-${{ github.event_name == 'workflow_dispatch' }}-${{ github.event_name == 'schedule' }}
cancel-in-progress: true
permissions: read-all
jobs:
target-determination:
if: github.repository_owner == 'pytorch'
name: before-test
uses: ./.github/workflows/target_determination.yml
permissions:
id-token: write
contents: read
get-label-type:
name: get-label-type
uses: pytorch/pytorch/.github/workflows/_runner-determinator.yml@main
if: ${{ (github.event_name != 'schedule' || github.repository == 'pytorch/pytorch') && github.repository_owner == 'pytorch' }}
with:
triggering_actor: ${{ github.triggering_actor }}
issue_owner: ${{ github.event.pull_request.user.login || github.event.issue.user.login }}
curr_branch: ${{ github.head_ref || github.ref_name }}
curr_ref_type: ${{ github.ref_type }}
linux-jammy-rocm-py3_10-build:
if: ${{ (github.event_name != 'schedule' || github.repository == 'pytorch/pytorch') && github.repository_owner == 'pytorch' }}
name: linux-jammy-rocm-py3.10
uses: ./.github/workflows/_linux-build.yml
needs: get-label-type
with:
runner_prefix: "${{ needs.get-label-type.outputs.label-type }}"
build-environment: linux-jammy-rocm-py3.10
docker-image-name: ci-image:pytorch-linux-jammy-rocm-n-py3
sync-tag: rocm-build
test-matrix: |
{ include: [
{ config: "default", shard: 1, num_shards: 2, runner: "linux.rocm.gpu.gfx1100" },
{ config: "default", shard: 2, num_shards: 2, runner: "linux.rocm.gpu.gfx1100" },
]}
secrets: inherit
linux-jammy-rocm-py3_10-test:
permissions:
id-token: write
contents: read
name: linux-jammy-rocm-py3_10
uses: ./.github/workflows/_rocm-test.yml
needs:
- linux-jammy-rocm-py3_10-build
- target-determination
with:
build-environment: linux-jammy-rocm-py3.10
docker-image: ${{ needs.linux-jammy-rocm-py3_10-build.outputs.docker-image }}
test-matrix: ${{ needs.linux-jammy-rocm-py3_10-build.outputs.test-matrix }}
tests-to-include: >-
${{ github.event_name == 'schedule' && 'test_nn test_torch test_cuda test_ops test_unary_ufuncs test_binary_ufuncs
test_autograd inductor/test_torchinductor inductor/test_kernel_benchmark
inductor/test_pad_mm inductor/test_benchmark_fusion inductor/test_aot_inductor
inductor/test_torchinductor inductor/test_decompose_mem_bound_mm
inductor/test_flex_attention inductor/test_max_autotune' || '' }}
secrets: inherit

View File

@ -26,11 +26,23 @@ jobs:
id-token: write
contents: read
get-label-type:
name: get-label-type
uses: pytorch/pytorch/.github/workflows/_runner-determinator.yml@main
if: ${{ (github.event_name != 'schedule' || github.repository == 'pytorch/pytorch') && github.repository_owner == 'pytorch' }}
with:
triggering_actor: ${{ github.triggering_actor }}
issue_owner: ${{ github.event.pull_request.user.login || github.event.issue.user.login }}
curr_branch: ${{ github.head_ref || github.ref_name }}
curr_ref_type: ${{ github.ref_type }}
linux-jammy-rocm-py3_10-build:
if: ${{ (github.event_name != 'schedule' || github.repository == 'pytorch/pytorch') && github.repository_owner == 'pytorch' }}
name: linux-jammy-rocm-py3.10
uses: ./.github/workflows/_linux-build.yml
needs: get-label-type
with:
runner_prefix: "${{ needs.get-label-type.outputs.label-type }}"
build-environment: linux-jammy-rocm-py3.10
docker-image-name: ci-image:pytorch-linux-jammy-rocm-n-py3
sync-tag: rocm-build
@ -59,29 +71,3 @@ jobs:
docker-image: ${{ needs.linux-jammy-rocm-py3_10-build.outputs.docker-image }}
test-matrix: ${{ needs.linux-jammy-rocm-py3_10-build.outputs.test-matrix }}
secrets: inherit
linux-jammy-rocm-py3_10-gfx1100-test:
if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }}
permissions:
id-token: write
contents: read
name: linux-jammy-rocm-py3_10-gfx1100
uses: ./.github/workflows/_rocm-test.yml
needs:
- linux-jammy-rocm-py3_10-build
- target-determination
with:
build-environment: linux-jammy-rocm-py3.10
docker-image: ${{ needs.linux-jammy-rocm-py3_10-build.outputs.docker-image }}
test-matrix: |
{ include: [
{ config: "default", shard: 1, num_shards: 2, runner: "linux.rocm.gpu.gfx1100" },
{ config: "default", shard: 2, num_shards: 2, runner: "linux.rocm.gpu.gfx1100" },
]}
tests-to-include: >
test_nn test_torch test_cuda test_ops test_unary_ufuncs test_binary_ufuncs
test_autograd inductor/test_torchinductor inductor/test_kernel_benchmark
inductor/test_pad_mm inductor/test_benchmark_fusion inductor/test_aot_inductor
inductor/test_torchinductor inductor/test_decompose_mem_bound_mm
inductor/test_flex_attention inductor/test_max_autotune
secrets: inherit

View File

@ -58,8 +58,10 @@ jobs:
else
COMMIT_SHA="${{ github.sha }}"
fi
echo "sha=${COMMIT_SHA}" >> "${GITHUB_OUTPUT}"
echo "tag_name=trunk/${COMMIT_SHA}" >> "${GITHUB_OUTPUT}"
{
echo "sha=${COMMIT_SHA}"
echo "tag_name=trunk/${COMMIT_SHA}"
} >> "${GITHUB_OUTPUT}"
- name: Validate commit SHA
run: |
@ -87,7 +89,7 @@ jobs:
echo "✅ Commit ${COMMIT_SHA} is valid (automatic push trigger)"
fi
- name: Create and push tag with retry
- name: Create and push tag(s) with retry
id: check_tag
env:
TAG_NAME: ${{ steps.commit.outputs.tag_name }}
@ -112,14 +114,23 @@ jobs:
return 1
}
# Exit early if tag already exists
if check_tag_exists; then
echo "✅ Tag already exists - no action needed"
echo "exists=true" >> "${GITHUB_OUTPUT}"
exit 0
fi
# Counters for summary reporting
created_count=0
skipped_count=0
failed_count=0
echo "Tag ${TAG_NAME} does not exist, proceeding with creation"
# Always write outputs once on exit
finish() {
set +e
if [ -n "${GITHUB_OUTPUT:-}" ]; then
{
echo "created_count=${created_count}"
echo "skipped_count=${skipped_count}"
echo "failed_count=${failed_count}"
} >> "${GITHUB_OUTPUT}"
fi
}
trap finish EXIT
# Retry configuration
MAX_RETRIES=5
@ -194,31 +205,111 @@ jobs:
}
}
# Execute with retry
if retry_with_backoff "tag_with_retry" "Creating tag ${TAG_NAME} for commit ${COMMIT_SHA}"; then
echo "exists=false" >> "${GITHUB_OUTPUT}"
# New behavior for push events: enumerate commits in the push and tag each one.
# For workflow_dispatch, retain existing single-SHA behavior.
# Always fetch tags once up front to improve idempotency in loops
git fetch origin --tags --quiet || true
if [ "${{ github.event_name }}" = "push" ]; then
BEFORE_SHA="${{ github.event.before }}"
AFTER_SHA="${{ github.sha }}" # same as event.after
# List commits introduced by this push (old..new), oldest first for stable ordering
commits_file="$(mktemp)"
git rev-list --reverse "${BEFORE_SHA}..${AFTER_SHA}" > "${commits_file}"
if [ ! -s "${commits_file}" ]; then
echo "No new commits found between ${BEFORE_SHA}..${AFTER_SHA}; nothing to tag."
rm -f "${commits_file}"
exit 0
fi
commit_count="$(wc -l < "${commits_file}" | tr -d ' ')"
echo "Found ${commit_count} commit(s) to tag for push:"
while IFS= read -r sha; do
printf ' %s\n' "${sha}"
done < "${commits_file}"
while IFS= read -r sha; do
TAG_NAME="trunk/${sha}"
COMMIT_SHA="${sha}"
# If tag already exists locally or remotely, skip (idempotent)
if check_tag_exists; then
echo "✅ Tag ${TAG_NAME} already exists - skipping"
skipped_count=$((skipped_count + 1))
continue
fi
echo "Tag ${TAG_NAME} does not exist, proceeding with creation"
if retry_with_backoff "tag_with_retry" "Creating tag ${TAG_NAME} for commit ${COMMIT_SHA}"; then
created_count=$((created_count + 1))
else
echo "Tag creation failed after all retry attempts for ${TAG_NAME}"
failed_count=$((failed_count + 1))
fi
done < "${commits_file}"
rm -f "${commits_file}"
if [ "${failed_count}" -gt 0 ]; then
exit 1
fi
exit 0
else
echo "Tag creation failed after all retry attempts"
exit 1
# workflow_dispatch path (single SHA tagging preserved)
# Exit early if tag already exists
if check_tag_exists; then
echo "✅ Tag already exists - no action needed"
skipped_count=1
exit 0
fi
echo "Tag ${TAG_NAME} does not exist, proceeding with creation"
if retry_with_backoff "tag_with_retry" "Creating tag ${TAG_NAME} for commit ${COMMIT_SHA}"; then
created_count=1
exit 0
else
echo "Tag creation failed after all retry attempts"
failed_count=1
exit 1
fi
fi
- name: Tag creation summary
if: always()
run: |
if [ "${{ steps.check_tag.outputs.exists }}" = "true" ]; then
echo "✅ Tag ${{ steps.commit.outputs.tag_name }} already existed - no action needed"
elif [ "${{ job.status }}" = "success" ]; then
echo "✅ Successfully created tag ${{ steps.commit.outputs.tag_name }} for commit ${{ steps.commit.outputs.sha }}"
if [ "${{ github.event_name }}" = "push" ]; then
echo "Trigger: push on main"
echo "Created: ${{ steps.check_tag.outputs.created_count }}"
echo "Skipped (already existed): ${{ steps.check_tag.outputs.skipped_count }}"
echo "Failed: ${{ steps.check_tag.outputs.failed_count }}"
if [ "${{ steps.check_tag.outputs.failed_count }}" = "0" ]; then
echo "✅ Completed tagging for push range ${{ github.event.before }}..${{ github.sha }}"
else
echo "❌ Some tags failed to create for push range ${{ github.event.before }}..${{ github.sha }}"
fi
else
echo "❌ Failed to create tag ${{ steps.commit.outputs.tag_name }} for commit ${{ steps.commit.outputs.sha }}"
fi
if [ "${{ steps.check_tag.outputs.failed_count }}" = "0" ]; then
if [ "${{ steps.check_tag.outputs.created_count }}" = "0" ]; then
echo "✅ Tag ${{ steps.commit.outputs.tag_name }} already existed - no action needed"
else
echo "✅ Successfully created tag ${{ steps.commit.outputs.tag_name }} for commit ${{ steps.commit.outputs.sha }}"
fi
else
echo "❌ Failed to create tag ${{ steps.commit.outputs.tag_name }} for commit ${{ steps.commit.outputs.sha }}"
fi
echo ""
echo "Tag details:"
echo " Name: ${{ steps.commit.outputs.tag_name }}"
echo " Commit: ${{ steps.commit.outputs.sha }}"
echo " Trigger: ${{ github.event_name }}"
if [ -n "${{ github.event.inputs.commit_sha }}" ]; then
echo " Manual commit: ${{ github.event.inputs.commit_sha }}"
echo ""
echo "Tag details:"
echo " Name: ${{ steps.commit.outputs.tag_name }}"
echo " Commit: ${{ steps.commit.outputs.sha }}"
echo " Trigger: ${{ github.event_name }}"
if [ -n "${{ github.event.inputs.commit_sha }}" ]; then
echo " Manual commit: ${{ github.event.inputs.commit_sha }}"
fi
fi

View File

@ -180,16 +180,50 @@ jobs:
disable-monitor: false
secrets: inherit
win-vs2022-cuda12_6-py3-build:
name: win-vs2022-cuda12.6-py3
win-vs2022-cuda12_8-py3-build:
name: win-vs2022-cuda12.8-py3
uses: ./.github/workflows/_win-build.yml
needs: get-label-type
with:
build-environment: win-vs2022-cuda12.6-py3
cuda-version: "12.6"
build-environment: win-vs2022-cuda12.8-py3
cuda-version: "12.8"
runner: "${{ needs.get-label-type.outputs.label-type }}windows.4xlarge.nonephemeral"
secrets: inherit
linux-jammy-rocm-py3_10-build:
if: ${{ startsWith(github.event.ref, 'refs/tags/ciflow/trunk') }}
name: linux-jammy-rocm-py3.10
uses: ./.github/workflows/_linux-build.yml
needs: get-label-type
with:
runner_prefix: "${{ needs.get-label-type.outputs.label-type }}"
build-environment: linux-jammy-rocm-py3.10
docker-image-name: ci-image:pytorch-linux-jammy-rocm-n-py3
sync-tag: rocm-build
test-matrix: |
{ include: [
{ config: "default", shard: 1, num_shards: 2, runner: "linux.rocm.gpu.gfx942.1" },
{ config: "default", shard: 2, num_shards: 2, runner: "linux.rocm.gpu.gfx942.1" },
]}
secrets: inherit
linux-jammy-rocm-py3_10-test:
if: ${{ startsWith(github.event.ref, 'refs/tags/ciflow/trunk') }}
permissions:
id-token: write
contents: read
name: linux-jammy-rocm-py3.10
uses: ./.github/workflows/_rocm-test.yml
needs:
- linux-jammy-rocm-py3_10-build
- target-determination
with:
build-environment: linux-jammy-rocm-py3.10
docker-image: ${{ needs.linux-jammy-rocm-py3_10-build.outputs.docker-image }}
test-matrix: ${{ needs.linux-jammy-rocm-py3_10-build.outputs.test-matrix }}
tests-to-include: "test_nn test_torch test_cuda test_ops test_unary_ufuncs test_binary_ufuncs test_autograd inductor/test_torchinductor"
secrets: inherit
inductor-build:
name: inductor-build
uses: ./.github/workflows/_linux-build.yml
@ -200,6 +234,23 @@ jobs:
cuda-arch-list: '8.0'
secrets: inherit
# Test cross-compiled models with Windows libs extracted from wheel
cross-compile-linux-test:
name: cross-compile-linux-test
uses: ./.github/workflows/_linux-test.yml
needs:
- linux-jammy-cuda12_8-py3_10-gcc11-build
- get-label-type
- win-vs2022-cuda12_8-py3-build
with:
build-environment: linux-jammy-cuda12.8-py3.10-gcc11
docker-image: ${{ needs.linux-jammy-cuda12_8-py3_10-gcc11-build.outputs.docker-image }}
test-matrix: |
{ include: [
{ config: "aoti_cross_compile_for_windows", shard: 1, num_shards: 1, runner: "${{ needs.get-label-type.outputs.label-type }}linux.g6.4xlarge.experimental.nvidia.gpu", win_torch_wheel_artifact: "win-vs2022-cuda12.8-py3" },
]}
secrets: inherit
verify-cachebench-cpu-build:
name: verify-cachebench-cpu-build
uses: ./.github/workflows/_linux-build.yml

.gitignore vendored
View File

@ -374,6 +374,7 @@ third_party/ruy/
third_party/glog/
# Virtualenv
.venv/
venv/
# Log files
@ -395,3 +396,4 @@ android/pytorch_android_torchvision/.cxx
CLAUDE.local.md
/test_*.py
/debug_*.py
CLAUDE_CONTEXT/

View File

@ -209,6 +209,46 @@ command = [
'@{{PATHSFILE}}'
]
[[linter]]
code = 'PYREFLY'
include_patterns = [
'torch/**/*.py',
'torch/**/*.pyi',
'torchgen/**/*.py',
'torchgen/**/*.pyi',
'functorch/**/*.py',
'functorch/**/*.pyi',
]
exclude_patterns = []
command = [
'python3',
'tools/linter/adapters/pyrefly_linter.py',
'--config=pyrefly.toml',
]
init_command = [
'python3',
'tools/linter/adapters/pip_init.py',
'--dry-run={{DRYRUN}}',
'numpy==2.1.0 ; python_version >= "3.12"',
'expecttest==0.3.0',
'pyrefly==0.36.2',
'sympy==1.13.3',
'types-requests==2.27.25',
'types-pyyaml==6.0.2',
'types-tabulate==0.8.8',
'types-protobuf==5.29.1.20250403',
'types-setuptools==79.0.0.20250422',
'types-jinja2==2.11.9',
'types-colorama==0.4.6',
'filelock==3.18.0',
'junitparser==2.1.1',
'rich==14.1.0',
'optree==0.17.0',
'types-openpyxl==3.1.5.20250919',
'types-python-dateutil==2.9.0.20251008'
]
[[linter]]
code = 'CLANGTIDY'
include_patterns = [
@ -793,8 +833,7 @@ exclude_patterns = [
command = [
'python3',
'tools/linter/adapters/grep_linter.py',
'--pattern=cudaSetDevice(',
'--pattern=cudaGetDevice(',
'--pattern=(cudaSetDevice|cudaGetDevice)\\(',
'--linter-name=RAWCUDADEVICE',
'--error-name=raw CUDA API usage',
"""--error-description=\
@ -1098,11 +1137,8 @@ command = [
[[linter]]
code = 'WORKFLOWSYNC'
include_patterns = [
'.github/workflows/pull.yml',
'.github/workflows/trunk.yml',
'.github/workflows/periodic.yml',
'.github/workflows/mac-mps.yml',
'.github/workflows/slow.yml',
'.github/workflows/*.yml',
'.github/workflows/*.yaml',
]
command = [
'python3',

View File

@ -201,3 +201,17 @@ torch/backends/cudnn/ @eqy @syed-ahmed @Aidyn-A
/torch/csrc/stable/ @janeyx99 @mikaylagawarecki
/torch/headeronly/ @janeyx99
/torch/header_only_apis.txt @janeyx99
# FlexAttention
/torch/nn/attention/flex_attention.py @drisspg
/torch/_higher_order_ops/flex_attention.py @drisspg
/torch/_inductor/kernel/flex/ @drisspg
/torch/_inductor/codegen/cpp_flex_attention_template.py @drisspg
/test/inductor/test_flex_attention.py @drisspg
/test/inductor/test_flex_decoding.py @drisspg
# Low Precision GEMMs
/aten/src/ATen/native/cuda/Blas.cpp @drisspg @slayton58
/aten/src/ATen/cuda/CUDABlas.cpp @drisspg @slayton58
/aten/src/ATen/cuda/CUDABlas.h @drisspg @slayton58
/test/test_scaled_matmul_cuda.py @drisspg @slayton58

View File

@ -256,6 +256,7 @@ endif()
IF(USE_FBGEMM_GENAI)
set(FBGEMM_THIRD_PARTY ${PROJECT_SOURCE_DIR}/third_party/fbgemm/external/)
set(FBGEMM_GENAI_SRCS ${PROJECT_SOURCE_DIR}/third_party/fbgemm/fbgemm_gpu/experimental/gen_ai/src/quantize)
if(USE_CUDA)
# To avoid increasing the build time/binary size unnecessarily, use an allow-list of kernels to build.
# If you want to integrate a kernel from FBGEMM into torch, you have to add it here.
@ -288,62 +289,70 @@ IF(USE_FBGEMM_GENAI)
set_target_properties(fbgemm_genai PROPERTIES POSITION_INDEPENDENT_CODE ON)
set(fbgemm_genai_mx8mx8bf16_grouped
set(fbgemm_genai_cuh
"${FBGEMM_GENAI_SRCS}/cutlass_extensions/mx8mx8bf16_grouped/"
"${FBGEMM_GENAI_SRCS}/"
)
target_include_directories(fbgemm_genai PUBLIC
target_include_directories(fbgemm_genai PRIVATE
${FBGEMM_THIRD_PARTY}/cutlass/include
${FBGEMM_THIRD_PARTY}/cutlass/tools/util/include
${fbgemm_genai_mx8mx8bf16_grouped}
${fbgemm_genai_cuh}
${FBGEMM_GENAI_SRCS}/common/include/ # includes fbgemm_gpu/quantize/utils.h, fbgemm_gpu/quantize/tuning_cache.hpp
${FBGEMM_GENAI_SRCS}/include/ # includes fbgemm_gpu/torch_ops.h
)
else()
if(USE_ROCM)
# Only include the kernels we want to build to avoid increasing binary size.
file(GLOB_RECURSE fbgemm_genai_native_rocm_hip
"${FBGEMM_GENAI_SRCS}/ck_extensions/fp8_rowwise_grouped/kernels/fp8_rowwise_grouped*.hip"
"${FBGEMM_GENAI_SRCS}/ck_extensions/fp8_rowwise_grouped/fp8_rowwise_grouped_gemm.hip")
set_source_files_properties(${fbgemm_genai_native_rocm_hip} PROPERTIES HIP_SOURCE_PROPERTY_FORMAT 1)
# Add additional HIPCC compiler flags for performance
set(FBGEMM_GENAI_EXTRA_HIPCC_FLAGS
-mllvm
-amdgpu-coerce-illegal-types=1
-mllvm
-enable-post-misched=0
-mllvm
-greedy-reverse-local-assignment=1
-fhip-new-launch-api)
# Add FBGEMM_GENAI include directories for torch_ops.h
list(APPEND ATen_CUDA_INCLUDE ${PROJECT_SOURCE_DIR}/third_party/fbgemm/fbgemm_gpu/experimental/gen_ai/src/quantize/include)
list(APPEND ATen_CUDA_INCLUDE ${PROJECT_SOURCE_DIR}/third_party/fbgemm/fbgemm_gpu/experimental/gen_ai/src/quantize/common/include)
elseif(USE_ROCM)
# Only include the kernels we want to build to avoid increasing binary size.
file(GLOB_RECURSE fbgemm_genai_native_rocm_hip
"${FBGEMM_GENAI_SRCS}/ck_extensions/fp8_rowwise_grouped/kernels/fp8_rowwise_grouped*.hip"
"${FBGEMM_GENAI_SRCS}/ck_extensions/fp8_rowwise_grouped/fp8_rowwise_grouped_gemm.hip")
set_source_files_properties(${fbgemm_genai_native_rocm_hip} PROPERTIES HIP_SOURCE_PROPERTY_FORMAT 1)
# Only compile for gfx942 for now.
# This is rather hacky, I could not figure out a clean solution :(
set(HIP_CLANG_FLAGS_ORIGINAL ${HIP_CLANG_FLAGS})
string(REGEX REPLACE "--offload-arch=[^ ]*" "" FILTERED_HIP_CLANG_FLAGS "${HIP_CLANG_FLAGS}")
if("gfx942" IN_LIST PYTORCH_ROCM_ARCH)
list(APPEND FILTERED_HIP_CLANG_FLAGS --offload-arch=gfx942;)
# Add additional HIPCC compiler flags for performance
set(FBGEMM_GENAI_EXTRA_HIPCC_FLAGS
-mllvm
-enable-post-misched=0
-mllvm
-greedy-reverse-local-assignment=1
-fhip-new-launch-api)
if(DEFINED ROCM_VERSION_DEV AND ROCM_VERSION_DEV VERSION_LESS "7.2.0")
list(PREPEND FBGEMM_GENAI_EXTRA_HIPCC_FLAGS -mllvm -amdgpu-coerce-illegal-types=1)
endif()
set(HIP_CLANG_FLAGS ${FILTERED_HIP_CLANG_FLAGS})
hip_add_library(
fbgemm_genai STATIC
${fbgemm_genai_native_rocm_hip}
HIPCC_OPTIONS ${HIP_HCC_FLAGS} ${FBGEMM_GENAI_EXTRA_HIPCC_FLAGS})
set(HIP_CLANG_FLAGS ${HIP_CLANG_FLAGS_ORIGINAL})
set_target_properties(fbgemm_genai PROPERTIES POSITION_INDEPENDENT_CODE ON)
target_compile_definitions(fbgemm_genai PRIVATE FBGEMM_GENAI_NO_EXTENDED_SHAPES)
target_include_directories(fbgemm_genai PUBLIC
# FBGEMM version of Composable Kernel is used due to some customizations
${FBGEMM_THIRD_PARTY}/composable_kernel/include
${FBGEMM_THIRD_PARTY}/composable_kernel/library/include
${FBGEMM_THIRD_PARTY}/cutlass/include
${FBGEMM_THIRD_PARTY}/cutlass/tools/util/include
${FBGEMM_GENAI_SRCS}/common/include/ # includes fbgemm_gpu/quantize/utils.h, fbgemm_gpu/quantize/tuning_cache.hpp
${FBGEMM_GENAI_SRCS}/include/ # includes fbgemm_gpu/torch_ops.h
)
# Only compile for gfx942 for now.
# This is rather hacky, I could not figure out a clean solution :(
set(HIP_CLANG_FLAGS_ORIGINAL ${HIP_CLANG_FLAGS})
string(REGEX REPLACE "--offload-arch=[^ ]*" "" FILTERED_HIP_CLANG_FLAGS "${HIP_CLANG_FLAGS}")
if("gfx942" IN_LIST PYTORCH_ROCM_ARCH)
list(APPEND FILTERED_HIP_CLANG_FLAGS --offload-arch=gfx942;)
endif()
set(HIP_CLANG_FLAGS ${FILTERED_HIP_CLANG_FLAGS})
hip_add_library(
fbgemm_genai STATIC
${fbgemm_genai_native_rocm_hip}
HIPCC_OPTIONS ${HIP_HCC_FLAGS} ${FBGEMM_GENAI_EXTRA_HIPCC_FLAGS})
set(HIP_CLANG_FLAGS ${HIP_CLANG_FLAGS_ORIGINAL})
set_target_properties(fbgemm_genai PROPERTIES POSITION_INDEPENDENT_CODE ON)
target_compile_definitions(fbgemm_genai PRIVATE FBGEMM_GENAI_NO_EXTENDED_SHAPES)
target_include_directories(fbgemm_genai PRIVATE
# FBGEMM version of Composable Kernel is used due to some customizations
${FBGEMM_THIRD_PARTY}/composable_kernel/include
${FBGEMM_THIRD_PARTY}/composable_kernel/library/include
${FBGEMM_THIRD_PARTY}/cutlass/include
${FBGEMM_THIRD_PARTY}/cutlass/tools/util/include
${FBGEMM_GENAI_SRCS}/common/include/ # includes fbgemm_gpu/quantize/utils.h, fbgemm_gpu/quantize/tuning_cache.hpp
${FBGEMM_GENAI_SRCS}/include/ # includes fbgemm_gpu/torch_ops.h
)
# Add FBGEMM_GENAI include directories for torch_ops.h
list(APPEND ATen_HIP_INCLUDE ${PROJECT_SOURCE_DIR}/third_party/fbgemm/fbgemm_gpu/experimental/gen_ai/src/quantize/include)
list(APPEND ATen_HIP_INCLUDE ${PROJECT_SOURCE_DIR}/third_party/fbgemm/fbgemm_gpu/experimental/gen_ai/src/quantize/common/include)
endif()
endif()
@ -692,12 +701,6 @@ if(USE_CUDA AND NOT USE_ROCM)
list(APPEND ATen_CUDA_INCLUDE ${CMAKE_CURRENT_SOURCE_DIR}/../../../third_party/cutlass/include)
list(APPEND ATen_CUDA_INCLUDE ${CMAKE_CURRENT_SOURCE_DIR}/../../../third_party/cutlass/tools/util/include)
# Add FBGEMM_GENAI include directories for torch_ops.h
if(USE_FBGEMM_GENAI)
list(APPEND ATen_CUDA_INCLUDE ${CMAKE_CURRENT_SOURCE_DIR}/../../../third_party/fbgemm/fbgemm_gpu/experimental/gen_ai/src/quantize/include)
list(APPEND ATen_CUDA_INCLUDE ${CMAKE_CURRENT_SOURCE_DIR}/../../../third_party/fbgemm/fbgemm_gpu/experimental/gen_ai/src/quantize/common/include)
endif()
if($ENV{ATEN_STATIC_CUDA})
if(CUDA_VERSION VERSION_LESS_EQUAL 12.9)
list(APPEND ATen_CUDA_DEPENDENCY_LIBS

View File

@ -19,6 +19,7 @@
#include <ATen/detail/MPSHooksInterface.h>
#include <ATen/detail/MTIAHooksInterface.h>
#include <ATen/detail/PrivateUse1HooksInterface.h>
#include <ATen/detail/XLAHooksInterface.h>
#include <ATen/detail/XPUHooksInterface.h>
#include <c10/core/QEngine.h>
#include <c10/core/impl/DeviceGuardImplInterface.h>
@ -88,6 +89,8 @@ class TORCH_API Context {
return at::detail::getHIPHooks();
} else if (opt_device_type == at::kHPU) {
return at::detail::getHPUHooks();
} else if (opt_device_type == at::kXLA) {
return at::detail::getXLAHooks();
} else {
TORCH_CHECK(
false,
@ -196,7 +199,7 @@ class TORCH_API Context {
return c10::impl::hasDeviceGuardImpl(c10::DeviceType::IPU);
}
static bool hasXLA() {
return c10::impl::hasDeviceGuardImpl(c10::DeviceType::XLA);
return detail::getXLAHooks().hasXLA();
}
static bool hasXPU() {
return detail::getXPUHooks().hasXPU();

View File

@ -389,37 +389,16 @@ void fillVersion<DLManagedTensorVersioned>(
// constructed out of ATen tensor
template <class T>
T* toDLPackImpl(const Tensor& src) {
auto view = src;
// Detect whether there is need to normalize the strides
// Background: gh-83069
//
// However, normalizing strides can come at a high-cost
// to slow down toDLPack conversion 3x, so we
// only normalize if needed.
//
// The following code detects whether the src follows
// a continuous pattern. If the src follows such pattern (common-case)
// then we do not need to normalize the strides.
bool need_normalize_strides = src.dim() == 1 && src.size(0) == 1 && src.stride(0) != 1;
// less common case, try normalizing the strides
if (need_normalize_strides) {
// create a new tensor with possibly normalized strides
// gh-83069
auto shape = src.sizes();
view = src.as_strided(shape, {1}, src.storage_offset());
}
ATenDLMTensor<T>* atDLMTensor(new ATenDLMTensor<T>);
atDLMTensor->handle = view;
atDLMTensor->handle = src;
atDLMTensor->tensor.manager_ctx = atDLMTensor;
atDLMTensor->tensor.deleter = &deleter<T>;
atDLMTensor->tensor.dl_tensor.data = view.data_ptr();
atDLMTensor->tensor.dl_tensor.data = src.data_ptr();
atDLMTensor->tensor.dl_tensor.device = torchDeviceToDLDevice(src.device());
atDLMTensor->tensor.dl_tensor.ndim = static_cast<int32_t>(src.dim());
atDLMTensor->tensor.dl_tensor.dtype = getDLDataType(src);
atDLMTensor->tensor.dl_tensor.shape = const_cast<int64_t*>(view.sizes().data());
atDLMTensor->tensor.dl_tensor.strides = const_cast<int64_t*>(view.strides().data());
atDLMTensor->tensor.dl_tensor.shape = const_cast<int64_t*>(src.sizes().data());
atDLMTensor->tensor.dl_tensor.strides = const_cast<int64_t*>(src.strides().data());
atDLMTensor->tensor.dl_tensor.byte_offset = 0;
fillVersion(&atDLMTensor->tensor);
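Note on the hunk above: dropping the stride-normalization path means a one-element view now keeps whatever stride it was created with when exported through DLPack. A minimal sketch of the gh-83069 corner case, assuming a libtorch build is available (illustrative only, not part of the change):
#include <torch/torch.h>
#include <iostream>
int main() {
  // A one-element view can legally carry a stride other than 1.
  auto base = torch::arange(4);                                   // sizes [4], strides [1]
  auto one = base.slice(/*dim=*/0, /*start=*/0, /*end=*/1, /*step=*/2);
  std::cout << one.size(0) << " " << one.stride(0) << "\n";       // 1 2
  // With the change above, toDLPack presumably exports stride 2 as-is
  // instead of rewriting it to 1 for this size-1 case.
}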

View File

@ -52,16 +52,16 @@ struct DLPackTraits {};
template <>
struct DLPackTraits<DLManagedTensor> {
inline static const char* capsule = "dltensor";
inline static const char* used = "used_dltensor";
inline static constexpr const char* capsule = "dltensor";
inline static constexpr const char* used = "used_dltensor";
inline static auto toDLPack = at::toDLPack;
inline static auto fromDLPack = at::fromDLPack;
};
template <>
struct DLPackTraits<DLManagedTensorVersioned> {
inline static const char* capsule = "dltensor_versioned";
inline static const char* used = "used_dltensor_versioned";
inline static constexpr const char* capsule = "dltensor_versioned";
inline static constexpr const char* used = "used_dltensor_versioned";
inline static auto toDLPack = at::toDLPackVersioned;
inline static auto fromDLPack = at::fromDLPackVersioned;
};

View File

@ -42,8 +42,14 @@ const PythonTorchFunctionTLS& PythonTorchFunctionTLS::get_state() {
}
bool torch_function_mode_enabled() {
return PythonTorchFunctionTLS::get_disabled_state() != TorchFunctionDisabledState::ALL_DISABLED &&
PythonTorchFunctionTLS::stack_len() > 0;
// Manually flatten because gcc is refusing to inline here. Note
// that we are still calling __tls_get_addr twice here with GCC,
// presumably because of
// https://gcc.gnu.org/bugzilla/show_bug.cgi?id=81501 (which says
// the fix ships in GCC 16), but forcing inlining still improves
// performance.
const auto& ptfs = pythonTorchFunctionState;
return ptfs.disabled_state_ != TorchFunctionDisabledState::ALL_DISABLED && !ptfs.stack_.empty();
}
// This is needed to disambiguate the ternary torch function disabled states

View File

@ -27,6 +27,7 @@ struct TORCH_API PythonTorchFunctionTLS {
TorchFunctionDisabledState disabled_state_ =
TorchFunctionDisabledState::ENABLED;
std::vector<std::shared_ptr<c10::SafePyObject>> stack_;
friend TORCH_API bool torch_function_mode_enabled();
};
TORCH_API bool torch_function_mode_enabled();
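The two hunks above flatten the hot check and add a friend declaration so it can read the thread-local members directly. A minimal sketch of that pattern, with illustrative names rather than the real PyTorch ones:
#include <memory>
#include <vector>
class ModeTLS {
 private:
  bool disabled_ = false;
  std::vector<std::shared_ptr<int>> stack_;
  friend bool mode_enabled();  // the hot query may touch the fields directly
};
static thread_local ModeTLS tls;
bool mode_enabled() {
  // Take the TLS address once and read both fields from it, instead of
  // calling out-of-line accessors that each recompute the address.
  const auto& s = tls;
  return !s.disabled_ && !s.stack_.empty();
}
int main() { return mode_enabled() ? 1 : 0; }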

View File

@ -39,7 +39,7 @@ struct HostBlock {
};
template <typename B>
struct alignas(64) FreeBlockList {
struct alignas(hardware_destructive_interference_size) FreeBlockList {
std::mutex mutex_;
std::deque<B*> list_;
};
@ -122,7 +122,7 @@ struct TORCH_API HostStats {
// Struct containing memory allocator summary statistics for host, as they
// are staged for reporting. This is a temporary struct that is used to
// avoid locking the allocator while collecting stats.
struct alignas(64) HostStatsStaged {
struct alignas(hardware_destructive_interference_size) HostStatsStaged {
std::mutex timing_mutex_;
// COUNT: total allocations (active + free)
// LOCK: access to this stat is protected by the allocator's blocks_mutex_
@ -669,7 +669,7 @@ struct CachingHostAllocatorImpl {
TORCH_CHECK_NOT_IMPLEMENTED(false, "Not implemented for query_event");
}
alignas(64) std::mutex blocks_mutex_;
alignas(hardware_destructive_interference_size) std::mutex blocks_mutex_;
ska::flat_hash_set<B*> blocks_; // block list
ska::flat_hash_map<void*, B*> ptr_to_block_;
@ -677,17 +677,17 @@ struct CachingHostAllocatorImpl {
// size. This allows us to quickly find a free block of the right size.
// We use deque to store per size free list and guard the list with its own
// mutex.
alignas(64) std::vector<FreeBlockList<B>> free_list_ =
alignas(hardware_destructive_interference_size) std::vector<FreeBlockList<B>> free_list_ =
std::vector<FreeBlockList<B>>(MAX_SIZE_INDEX);
alignas(64) std::mutex events_mutex_;
alignas(hardware_destructive_interference_size) std::mutex events_mutex_;
std::deque<std::pair<E, B*>> events_; // event queue paired with block
// Indicates whether the object is active.
// Set to false in the destructor to signal background threads to stop.
std::atomic<bool> active_{true};
protected:
alignas(64) HostStatsStaged stats_;
alignas(hardware_destructive_interference_size) HostStatsStaged stats_;
};
struct TORCH_API HostAllocator : public at::Allocator {
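These hunks replace the bare alignas(64) with a named interference constant; the intent is to keep independently contended members on separate cache lines and avoid false sharing. A standalone sketch using std::hardware_destructive_interference_size from <new> (C++17, and only if the standard library actually provides it; the PyTorch header presumably defines its own constant of the same name):
#include <mutex>
#include <new>      // std::hardware_destructive_interference_size
#include <thread>
struct Shards {
  // Each mutex/counter pair gets its own cache line, so two threads
  // hammering different shards do not invalidate each other's lines.
  alignas(std::hardware_destructive_interference_size) std::mutex a_mutex;
  long a = 0;
  alignas(std::hardware_destructive_interference_size) std::mutex b_mutex;
  long b = 0;
};
int main() {
  Shards s;
  std::thread t1([&] {
    for (int i = 0; i < 1000000; ++i) { std::lock_guard<std::mutex> g(s.a_mutex); ++s.a; }
  });
  std::thread t2([&] {
    for (int i = 0; i < 1000000; ++i) { std::lock_guard<std::mutex> g(s.b_mutex); ++s.b; }
  });
  t1.join();
  t2.join();
  return s.a == s.b ? 0 : 1;
}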

View File

@ -59,9 +59,7 @@ struct TORCH_API Generator {
explicit Generator(c10::intrusive_ptr<c10::GeneratorImpl> gen_impl)
: impl_(std::move(gen_impl)) {
if (impl_.get() == nullptr) {
throw std::runtime_error("GeneratorImpl with nullptr is not supported");
}
TORCH_CHECK(impl_.get(), "GeneratorImpl with nullptr is not supported");
}
bool operator==(const Generator& rhs) const {

View File

@ -229,10 +229,10 @@ private:
}
static const uint32_t kPhilox10A = 0x9E3779B9;
static const uint32_t kPhilox10B = 0xBB67AE85;
static const uint32_t kPhiloxSA = 0xD2511F53;
static const uint32_t kPhiloxSB = 0xCD9E8D57;
static constexpr uint32_t kPhilox10A = 0x9E3779B9;
static constexpr uint32_t kPhilox10B = 0xBB67AE85;
static constexpr uint32_t kPhiloxSA = 0xD2511F53;
static constexpr uint32_t kPhiloxSB = 0xCD9E8D57;
};
typedef philox_engine Philox4_32;

View File

@ -111,9 +111,7 @@ class TORCH_API TensorBase {
explicit TensorBase(
c10::intrusive_ptr<TensorImpl, UndefinedTensorImpl> tensor_impl)
: impl_(std::move(tensor_impl)) {
if (impl_.get() == nullptr) {
throw std::runtime_error("TensorImpl with nullptr is not supported");
}
TORCH_CHECK(impl_.get(), "TensorImpl with nullptr is not supported");
}
TensorBase(const TensorBase&) = default;
TensorBase(TensorBase&&) noexcept = default;

View File

@ -109,6 +109,10 @@ TORCH_LIBRARY_IMPL(_, AutogradHPU, m) {
m.fallback(AUTOGRAD_FALLBACK);
}
TORCH_LIBRARY_IMPL(_, AutogradPrivateUse1, m) {
m.fallback(AUTOGRAD_FALLBACK);
}
#undef AUTOGRAD_FALLBACK
} // namespace

View File

@ -442,11 +442,17 @@ RegistrationHandleRAII Dispatcher::registerFallback(DispatchKey dispatchKey, Ker
auto idx = getDispatchTableIndexForDispatchKey(dispatchKey);
TORCH_CHECK(idx >= 0 && static_cast<uint64_t>(idx) < backendFallbackKernels_.size(), "idx=", idx);
// NB: Preserve BC for registering a fallback for AutogradPrivateUse1 multiple times;
// refer to https://github.com/pytorch/pytorch/issues/163979 for more information.
TORCH_CHECK(
!backendFallbackKernels_[idx].kernel.isValid(),
"Tried to register multiple backend fallbacks for the same dispatch key ", dispatchKey, "; previous registration ",
backendFallbackKernels_[idx].debug, ", new registration ", debug
);
dispatchKey == DispatchKey::AutogradPrivateUse1 ||
!backendFallbackKernels_[idx].kernel.isValid(),
"Tried to register multiple backend fallbacks for the same dispatch key ",
dispatchKey,
"; previous registration ",
backendFallbackKernels_[idx].debug,
", new registration ",
debug);
// NB: inferred function schema is always nullptr for fallbacks, as fallbacks
// cannot be unboxed
backendFallbackKernels_[idx] = impl::AnnotatedKernel(std::move(kernel), nullptr, std::move(debug));

View File

@ -68,11 +68,7 @@ Symbol InternedStrings::_symbol(const std::string& s) {
return it->second;
auto pos = s.find("::");
if (pos == std::string::npos) {
std::stringstream ss;
ss << "all symbols must have a namespace, <namespace>::<string>, but found: " << s;
throw std::runtime_error(ss.str());
}
TORCH_CHECK(pos != std::string::npos, "all symbols must have a namespace, <namespace>::<string>, but found: ", s);
Symbol ns = _symbol("namespaces::" + s.substr(0, pos));
Symbol sym(sym_to_info_.size());
@ -121,12 +117,7 @@ std::string Symbol::domainString() const {
}
Symbol Symbol::fromDomainAndUnqualString(const std::string & d, const std::string & s) {
if (d.compare(0, domain_prefix().size(), domain_prefix()) != 0) {
std::ostringstream ss;
ss << "Symbol: domain string is expected to be prefixed with '"
<< domain_prefix() << "', e.g. 'org.pytorch.aten'";
throw std::runtime_error(ss.str());
}
TORCH_CHECK(d.compare(0, domain_prefix().size(), domain_prefix()) == 0, "Symbol: domain string is expected to be prefixed with '", domain_prefix(), "', e.g. 'org.pytorch.aten'");
std::string qualString = d.substr(domain_prefix().size()) + "::" + s;
return fromQualString(qualString);
}

View File

@ -7,6 +7,7 @@
#include <ATen/core/jit_type.h>
#include <ATen/core/stack.h>
#include <ATen/core/type_factory.h>
#include <c10/util/Exception.h>
#include <c10/util/StringUtil.h>
#include <c10/util/hash.h>
#include <c10/util/irange.h>
@ -412,7 +413,7 @@ size_t IValue::hash(const IValue& v) {
case Tag::Enum:
case Tag::Stream:
case Tag::Uninitialized:
throw std::runtime_error(
TORCH_CHECK(false,
"unhashable type: '" + v.type()->repr_str() + "'");
}
// the above switch should be exhaustive

View File

@ -624,7 +624,14 @@ struct TORCH_API IValue final {
IValue(const c10::SymBool& i) {
if (auto mi = i.maybe_as_bool()) {
tag = Tag::Bool;
#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
payload.u.as_int = *mi;
#elif __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
/* due to byte order, if the value is assigned via as_int, as_bool is not set correctly */
payload.u.as_bool = *mi;
#else
#error Unexpected or undefined __BYTE_ORDER__
#endif
} else {
tag = Tag::SymBool;
payload.u.as_intrusive_ptr = i.toSymNodeImpl().release();
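The added #if branch exists because the payload is a union: writing the boolean through the wider as_int member only leaves the bool member readable on little-endian hosts. A standalone sketch of the layout issue (plain C++, not the IValue code; memcpy keeps the byte inspection well defined):
#include <cstdint>
#include <cstring>
#include <iostream>
int main() {
  std::int64_t as_int = 1;  // the value a folded SymBool would store
  unsigned char bytes[sizeof(as_int)];
  std::memcpy(bytes, &as_int, sizeof(as_int));
  // A bool member overlapping the union sits in the first byte. On a
  // little-endian host that byte is the least significant one (1 here);
  // on a big-endian host it is the most significant one (0), so reading
  // the value back as a bool would yield false.
  std::cout << "first byte = " << int(bytes[0]) << "\n";
  return 0;
}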

View File

@ -8,6 +8,7 @@
#include <ATen/core/type_factory.h>
#include <ATen/core/qualified_name.h>
#include <c10/util/TypeList.h>
#include <c10/util/Exception.h>
#include <optional>
#include <c10/core/SymFloat.h>
#include <c10/core/SymBool.h>
@ -116,10 +117,8 @@ struct SingleElementType : public SharedType {
protected:
SingleElementType(TypePtr elem) : SharedType(Kind), elem(std::move(elem)) {
if (!this->elem) {
throw std::runtime_error(c10::str(
TORCH_CHECK(this->elem, c10::str(
"Can not create ", typeKindToString(Kind), " with None type"));
}
}
private:
@ -416,16 +415,12 @@ struct TORCH_API SymbolicShape {
}
ShapeSymbol operator[](size_t i) const {
if (!dims_) {
throw std::runtime_error("Rank isn't fixed");
}
TORCH_CHECK(dims_, "Rank isn't fixed");
return (*dims_).at(i);
}
ShapeSymbol at(size_t i) const {
if (!dims_) {
throw std::runtime_error("Rank isn't fixed");
}
TORCH_CHECK(dims_, "Rank isn't fixed");
return (*dims_).at(i);
}
@ -520,9 +515,7 @@ struct VaryingShape {
}
const std::optional<T> &operator[](size_t i) const {
if (!dims_) {
throw std::runtime_error("Rank isn't fixed");
}
TORCH_CHECK(dims_, "Rank isn't fixed");
return (*dims_).at(i);
}
@ -957,9 +950,7 @@ struct TORCH_API DictType : public SharedType {
TypePtr createWithContained(
std::vector<TypePtr> contained_types) const override {
if (contained_types.size() != 2) {
throw std::runtime_error("Expected 2 contained types");
}
TORCH_CHECK(contained_types.size() == 2, "Expected 2 contained types");
return create(std::move(contained_types.at(0)), std::move(contained_types.at(1)));
}

View File

@ -185,11 +185,11 @@ struct TORCH_API Type {
: repr_(nullptr) {}
/* implicit */ SingletonOrSharedTypePtr(SingletonTypePtr<T> p)
: repr_(p) {}
: repr_(makeSingletonSharedPtr(p.get())) {}
template <typename U, std::enable_if_t<std::is_convertible_v<U*, T*>, bool> = true>
/* implicit */ SingletonOrSharedTypePtr(SingletonTypePtr<U> p)
: repr_(SingletonTypePtr<T>(p.get())) {}
: repr_(makeSingletonSharedPtr(static_cast<T*>(p.get()))) {}
// We need to support construction from T* for pybind. The problem
@ -202,8 +202,8 @@ struct TORCH_API Type {
// Case 2: if T is exactly Type, we need to do a dynamic_cast to
// check if it's a SharedType and do the right thing.
//
// Case 3: Otherwise, T is not a SharedType. (debug-check this
// assumption!) Use a singleton pointer.
// Case 3: Otherwise, T is not a SharedType. Use a singleton
// pointer.
template <typename U = T, std::enable_if_t<std::is_base_of_v<SharedType, U>, bool> = true>
/* implicit */ SingletonOrSharedTypePtr(T* p) : SingletonOrSharedTypePtr(static_cast<typename detail::as_shared_type<U>::type>(p)->shared_from_this()) {}
@ -211,15 +211,15 @@ struct TORCH_API Type {
template <typename U = T, std::enable_if_t<std::is_same_v<Type, U>, bool> = true>
/* implicit */ SingletonOrSharedTypePtr(T* p) {
if (auto* shared_p = dynamic_cast<typename detail::as_shared_type<U>::type>(p)) {
repr_ = Repr(shared_p->shared_from_this());
repr_ = shared_p->shared_from_this();
} else {
repr_ = Repr(p);
repr_ = makeSingletonSharedPtr(p);
}
}
template <typename U = T, std::enable_if_t<!std::is_same_v<Type, U> && !std::is_base_of_v<SharedType, U>, bool> = true>
/* implicit */ SingletonOrSharedTypePtr(T* p)
: repr_(p) {
: repr_(makeSingletonSharedPtr(p)) {
TORCH_INTERNAL_ASSERT_DEBUG_ONLY(dynamic_cast<typename detail::as_shared_type<U>::type>(p) == nullptr);
}
@ -230,19 +230,19 @@ struct TORCH_API Type {
~SingletonOrSharedTypePtr() = default;
T* get() const {
return repr_.isSharedAndNonNull() ? repr_.shared_.repr_.get() : static_cast<T*>(repr_.rawRepr().first);
return repr_.get();
}
operator bool() const {
return repr_.isNonNull();
return repr_ != nullptr;
}
bool operator==(std::nullptr_t) const {
return !repr_.isNonNull();
return repr_ == nullptr;
}
bool operator!=(std::nullptr_t) const {
return repr_.isNonNull();
return repr_ != nullptr;
}
template <typename U = T, std::enable_if_t<!std::is_same_v<std::remove_const_t<U>, void>, bool> = true>
@ -255,138 +255,14 @@ struct TORCH_API Type {
}
private:
// NOTE: SharedPtrWrapper exists to work around a baffling bug in
// nvcc; see comment in destroy() below.
struct SharedPtrWrapper {
SharedPtrWrapper(std::shared_ptr<T> &&x)
: repr_(std::move(x)) {}
std::shared_ptr<T> repr_;
};
union Repr {
Repr() : Repr(nullptr) {}
// Use shared_ptr's aliasing constructor to create a non-owning pointer
// to a singleton. The lifetime is tied to the null shared_ptr, so there's
// no reference counting overhead for the singleton itself.
static std::shared_ptr<T> makeSingletonSharedPtr(T* ptr) {
return std::shared_ptr<T>(std::shared_ptr<T>(), ptr);
}
explicit Repr(std::shared_ptr<T> x)
: shared_(std::move(x)) {}
explicit Repr(std::nullptr_t)
: singletonRepr_(nullptr) {}
explicit Repr(SingletonTypePtr<T> p)
: singletonRepr_(p.get()) {}
~Repr() {
destroy();
}
// NOTE: the only non-UB way to access our null state is through
// rawRepr(), because our copy operation doesn't preserve which
// union member is active for null pointers.
Repr(const Repr& rhs) {
if (rhs.isSharedAndNonNull()) {
new (&shared_) SharedPtrWrapper(rhs.shared_);
} else {
singletonRepr_.singleton_ = static_cast<T*>(rhs.rawRepr().first);
TORCH_INTERNAL_ASSERT_DEBUG_ONLY(rhs.singletonRepr_.unused_ == nullptr);
singletonRepr_.unused_ = nullptr;
}
}
Repr(Repr&& rhs) noexcept {
if (rhs.isSharedAndNonNull()) {
new (&shared_) SharedPtrWrapper(std::move(rhs.shared_));
} else {
singletonRepr_.singleton_ = static_cast<T*>(rhs.rawRepr().first);
TORCH_INTERNAL_ASSERT_DEBUG_ONLY(rhs.singletonRepr_.unused_ == nullptr);
singletonRepr_.unused_ = nullptr;
}
}
Repr& operator=(const Repr& rhs) {
if (&rhs == this) {
return *this;
}
if (rhs.isSharedAndNonNull()) {
if (isSharedAndNonNull()) {
shared_ = rhs.shared_;
} else {
new (&shared_) SharedPtrWrapper(rhs.shared_);
}
} else {
if (isSharedAndNonNull()) {
destroy();
}
singletonRepr_.singleton_ = static_cast<T*>(rhs.rawRepr().first);
TORCH_INTERNAL_ASSERT_DEBUG_ONLY(rhs.rawRepr().nullIfSingleton_ == nullptr);
singletonRepr_.unused_ = nullptr;
}
return *this;
}
Repr& operator=(Repr&& rhs) noexcept {
if (&rhs == this) {
return *this;
}
if (rhs.isSharedAndNonNull()) {
if (isSharedAndNonNull()) {
shared_ = std::move(rhs.shared_);
} else {
new (&shared_) SharedPtrWrapper(std::move(rhs.shared_));
}
} else {
if (isSharedAndNonNull()) {
destroy();
}
singletonRepr_.singleton_ = static_cast<T*>(rhs.rawRepr().first);
TORCH_INTERNAL_ASSERT_DEBUG_ONLY(rhs.rawRepr().nullIfSingleton_ == nullptr);
singletonRepr_.unused_ = nullptr;
}
return *this;
}
SharedPtrWrapper shared_;
struct SingletonRepr {
explicit SingletonRepr(T* s) : singleton_(s) {}
T* singleton_;
void* unused_ = nullptr;
} singletonRepr_;
struct RawRepr {
void* first;
void* nullIfSingleton_;
};
// It is UB to read the singleton part of Repr if it was
// constructed as a shared_ptr and vice versa, but memcpying out
// the representation is always OK, so here's an accessor to obey
// the letter of the law.
RawRepr rawRepr() const {
RawRepr repr{};
memcpy(&repr, reinterpret_cast<const char *>(this), sizeof(RawRepr));
return repr;
}
bool isNonNull() const {
auto repr = rawRepr();
TORCH_INTERNAL_ASSERT_DEBUG_ONLY(repr.nullIfSingleton_ == nullptr || repr.first != nullptr);
return repr.first != nullptr;
}
bool isSharedAndNonNull() const {
return rawRepr().nullIfSingleton_ != nullptr;
}
private:
void destroy() {
if (isSharedAndNonNull()) {
// Without SharedPtrWrapper, this line would read
// `shared_.~shared_ptr()` and nvcc would complain with
// "error: expected primary-expression before '>' token"
// referring to the "t" in "shared_ptr". SharedPtrWrapper
// exists to work around this compiler bug.
shared_.~SharedPtrWrapper();
}
}
} repr_;
std::shared_ptr<T> repr_;
};
using TypePtr = SingletonOrSharedTypePtr<Type>;
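The refactor above replaces the hand-rolled tagged union with a plain shared_ptr, using the aliasing constructor to wrap singleton Type objects without any ownership or refcount traffic. A minimal sketch of that trick, with an illustrative stand-in type:
#include <cassert>
#include <memory>
struct Type { int kind = 0; };
// A shared_ptr that points at a singleton but owns nothing: the empty
// shared_ptr passed first means there is no control block, so copies
// never touch a reference count.
std::shared_ptr<Type> non_owning(Type* singleton) {
  return std::shared_ptr<Type>(std::shared_ptr<Type>(), singleton);
}
int main() {
  static Type int_type;  // stand-in for a singleton type object
  std::shared_ptr<Type> p = non_owning(&int_type);
  assert(p.get() == &int_type);
  assert(p.use_count() == 0);    // no control block behind this pointer
  p = std::make_shared<Type>();  // the same member can also hold an owning pointer
  assert(p.use_count() == 1);
  return 0;
}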

View File

@ -8,6 +8,7 @@
#include <ATen/core/jit_type.h>
#include <c10/macros/Macros.h>
#include <c10/util/env.h>
#include <c10/util/Exception.h>
#include <c10/util/flat_hash_map.h>
#include <c10/util/irange.h>
#include <array>
@ -826,9 +827,7 @@ TupleType::TupleType(
: NamedType(TypeKind::TupleType, std::move(name)),
elements_(std::move(elements)),
has_free_variables_(std::any_of(elements_.begin(), elements_.end(), [](const TypePtr& v) {
if (!v) {
throw std::runtime_error("Can not create tuple with None type");
}
TORCH_CHECK(v, "Can not create tuple with None type");
return v->hasFreeVariables();
})), schema_(std::move(schema)) {

View File

@ -104,71 +104,6 @@ class Vectorized<float> {
}
return b;
}
// Implementation is picked from
// https://github.com/ARM-software/ComputeLibrary/blob/v25.01/src/core/NEON/SVEMath.inl#L105
inline svfloat32_t svexp_f32_z(svbool_t pg, svfloat32_t x) const {
const auto c1 =
svreinterpret_f32_u32(svdup_n_u32(0x3f7ffff6)); // x^1: 0x1.ffffecp-1f
const auto c2 =
svreinterpret_f32_u32(svdup_n_u32(0x3efffedb)); // x^2: 0x1.fffdb6p-2f
const auto c3 =
svreinterpret_f32_u32(svdup_n_u32(0x3e2aaf33)); // x^3: 0x1.555e66p-3f
const auto c4 =
svreinterpret_f32_u32(svdup_n_u32(0x3d2b9f17)); // x^4: 0x1.573e2ep-5f
const auto c5 =
svreinterpret_f32_u32(svdup_n_u32(0x3c072010)); // x^5: 0x1.0e4020p-7f
const auto shift = svreinterpret_f32_u32(
svdup_n_u32(0x4b00007f)); // 2^23 + 127 = 0x1.0000fep23f
const auto inv_ln2 = svreinterpret_f32_u32(
svdup_n_u32(0x3fb8aa3b)); // 1 / ln(2) = 0x1.715476p+0f
const auto neg_ln2_hi = svreinterpret_f32_u32(svdup_n_u32(
0xbf317200)); // -ln(2) from bits -1 to -19: -0x1.62e400p-1f
const auto neg_ln2_lo = svreinterpret_f32_u32(svdup_n_u32(
0xb5bfbe8e)); // -ln(2) from bits -20 to -42: -0x1.7f7d1cp-20f
const auto inf = svdup_n_f32(std::numeric_limits<float>::infinity());
const auto max_input = svdup_n_f32(88.37f); // Approximately ln(2^127.5)
const auto zero = svdup_n_f32(0.f);
const auto min_input = svdup_n_f32(-86.64f); // Approximately ln(2^-125)
// Range reduction:
// e^x = 2^n * e^r
// where:
// n = floor(x / ln(2))
// r = x - n * ln(2)
//
// By adding x / ln(2) with 2^23 + 127 (shift):
// * As FP32 fraction part only has 23-bits, the addition of 2^23 + 127
// forces decimal part
// of x / ln(2) out of the result. The integer part of x / ln(2) (i.e.
// n) + 127 will occupy the whole fraction part of z in FP32 format.
// Subtracting 2^23 + 127 (shift) from z will result in the integer part
// of x / ln(2) (i.e. n) because the decimal part has been pushed out
// and lost.
// * The addition of 127 makes the FP32 fraction part of z ready to be
// used as the exponent
// in FP32 format. Left shifting z by 23 bits will result in 2^n.
const auto z = svmla_f32_z(pg, shift, x, inv_ln2);
const auto n = svsub_f32_z(pg, z, shift);
const auto scale = svreinterpret_f32_u32(
svlsl_n_u32_z(pg, svreinterpret_u32_f32(z), 23)); // 2^n
// The calculation of n * ln(2) is done using 2 steps to achieve accuracy
// beyond FP32. This outperforms longer Taylor series (3-4 terms) both in
// terms of accuracy and performance.
const auto r_hi = svmla_f32_z(pg, x, n, neg_ln2_hi);
const auto r = svmla_f32_z(pg, r_hi, n, neg_ln2_lo);
// Compute the truncated Taylor series of e^r.
// poly = scale * (1 + c1 * r + c2 * r^2 + c3 * r^3 + c4 * r^4 + c5 * r^5)
const auto r2 = svmul_f32_z(pg, r, r);
const auto p1 = svmul_f32_z(pg, c1, r);
const auto p23 = svmla_f32_z(pg, c2, c3, r);
const auto p45 = svmla_f32_z(pg, c4, c5, r);
const auto p2345 = svmla_f32_z(pg, p23, p45, r2);
const auto p12345 = svmla_f32_z(pg, p1, p2345, r2);
auto poly = svmla_f32_z(pg, scale, p12345, scale);
// Handle underflow and overflow.
poly = svsel_f32(svcmplt_f32(pg, x, min_input), zero, poly);
poly = svsel_f32(svcmpgt_f32(pg, x, max_input), inf, poly);
return poly;
}
static Vectorized<float> loadu(const void* ptr, int64_t count = size()) {
if (count == size())
return svld1_f32(ptrue, reinterpret_cast<const float*>(ptr));
@ -313,11 +248,41 @@ class Vectorized<float> {
return USE_SLEEF(
Vectorized<float>(Sleef_expm1fx_u10sve(values)), map(std::expm1));
}
// Implementation copied from Arm Optimized Routines:
// https://github.com/ARM-software/optimized-routines/blob/master/math/aarch64/sve/expf.c
Vectorized<float> exp_u20() const {
return exp();
// Fall back to exp() for inputs that are too large or too small,
// i.e. when at least one element x satisfies |x| > 87.3...
svbool_t is_special_case = svacgt(svptrue_b32(), values, 0x1.5d5e2ap+6f);
if (svptest_any(svptrue_b32(), is_special_case)) {
return exp();
}
const svfloat32_t ln2_hi = svdup_n_f32(0x1.62e4p-1f);
const svfloat32_t ln2_lo = svdup_n_f32(0x1.7f7d1cp-20f);
const svfloat32_t c1 = svdup_n_f32(0.5f);
const svfloat32_t inv_ln2 = svdup_n_f32(0x1.715476p+0f);
const float shift = 0x1.803f8p17f;
/* n = round(x/(ln2/N)). */
svfloat32_t z = svmad_x(svptrue_b32(), inv_ln2, values, shift);
svfloat32_t n = svsub_x(svptrue_b32(), z, shift);
/* r = x - n*ln2/N. */
svfloat32_t r = values;
r = svmls_x(svptrue_b32(), r, n, ln2_hi);
r = svmls_x(svptrue_b32(), r, n, ln2_lo);
/* scale = 2^(n/N). */
svfloat32_t scale = svexpa(svreinterpret_u32(z));
/* poly(r) = exp(r) - 1 ~= r + 0.5 r^2. */
svfloat32_t r2 = svmul_x(svptrue_b32(), r, r);
svfloat32_t poly = svmla_x(svptrue_b32(), r, r2, c1);
return svmla_x(svptrue_b32(), scale, scale, poly);
}
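// A rough scalar sketch (illustrative only; the names below are made up and the
// FEXPA table lookup and special-case bound are ignored) of the range reduction
// used above: write x = n*ln2 + r with n ~ round(x/ln2) and r small, so that
// exp(x) = 2^n * exp(r), and approximate exp(r) - 1 by r + 0.5*r^2.
#include <cmath>

inline float exp_u20_scalar_sketch(float x) {
  const float inv_ln2 = 0x1.715476p+0f;
  const float ln2_hi = 0x1.62e4p-1f;    // high bits of ln(2)
  const float ln2_lo = 0x1.7f7d1cp-20f; // low bits, for extra precision
  float n = std::nearbyint(x * inv_ln2);
  float r = x - n * ln2_hi;             // two-step subtraction keeps r accurate
  r = r - n * ln2_lo;
  float poly = r + 0.5f * r * r;        // exp(r) - 1 ~= r + r^2/2
  float scale = std::ldexp(1.0f, static_cast<int>(n)); // 2^n; svexpa does this per lane
  return scale + scale * poly;          // scale * (1 + poly)
}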
Vectorized<float> fexp_u20() const {
return exp();
return exp_u20();
}
Vectorized<float> fmod(const Vectorized<float>& q) const {USE_SLEEF(
{ return Vectorized<float>(Sleef_fmodfx_sve(values, q)); },
@ -453,9 +418,11 @@ class Vectorized<float> {
ptrue, svmax_f32_z(ptrue, values, CONST_MIN_TANH), CONST_MAX_TANH);
// Step 2: Calculate exp(2 * x), where x is the clamped value.
// svmul_f32_z computes 2 * x, and svexp_f32_z computes the exponential of
// the result.
svfloat32_t exp2x = svexp_f32_z(ptrue, svmul_f32_z(ptrue, CONST_2, x));
// svmul_f32_z computes 2 * x, and exp_u20() computes the exponential of
// the result (via Vectorized<float>, then auto-converts back to
// svfloat32_t).
svfloat32_t exp2x =
Vectorized<float>(svmul_f32_z(ptrue, CONST_2, x)).exp_u20();
// Step 3: Calculate the numerator of the tanh function, which is exp(2x)
// - 1.

View File

@ -6,8 +6,11 @@
#ifdef __aarch64__
#if !defined(CPU_CAPABILITY_SVE)
#include <ATen/cpu/vec/vec128/vec128_bfloat16_neon.h>
#include <ATen/cpu/vec/vec128/vec128_double_neon.h>
#include <ATen/cpu/vec/vec128/vec128_float_neon.h>
#include <ATen/cpu/vec/vec128/vec128_half_neon.h>
#include <ATen/cpu/vec/vec128/vec128_int_aarch64.h>
#include <ATen/cpu/vec/vec128/vec128_uint_aarch64.h>
#endif
#include <ATen/cpu/vec/vec128/vec128_convert.h>

View File

@ -354,9 +354,47 @@ class Vectorized<c10::BFloat16> : public Vectorized16<
DEFINE_UNARY_ELEMENTWISE_FUNC_VIA_FLOAT_METHOD(abs)
Vectorized frac() const;
DEFINE_UNARY_ELEMENTWISE_FUNC_VIA_FLOAT_METHOD(neg)
DEFINE_UNARY_ELEMENTWISE_FUNC_VIA_FLOAT_METHOD(trunc)
DEFINE_UNARY_ELEMENTWISE_FUNC_VIA_FLOAT_METHOD(sqrt)
#ifdef __ARM_FEATURE_BF16
Vectorized<c10::BFloat16> neg() const {
return -values;
}
Vectorized<c10::BFloat16> reciprocal() const {
return 1.0f / values;
}
Vectorized<c10::BFloat16> operator==(
const Vectorized<c10::BFloat16>& other) const {
return values == other.values;
}
Vectorized<c10::BFloat16> operator!=(
const Vectorized<c10::BFloat16>& other) const {
return values != other.values;
}
Vectorized<c10::BFloat16> operator<(
const Vectorized<c10::BFloat16>& other) const {
return values < other.values;
}
Vectorized<c10::BFloat16> operator<=(
const Vectorized<c10::BFloat16>& other) const {
return values <= other.values;
}
Vectorized<c10::BFloat16> operator>(
const Vectorized<c10::BFloat16>& other) const {
return values > other.values;
}
Vectorized<c10::BFloat16> operator>=(
const Vectorized<c10::BFloat16>& other) const {
return values >= other.values;
}
#else
DEFINE_UNARY_ELEMENTWISE_FUNC_VIA_FLOAT_METHOD(neg)
DEFINE_UNARY_ELEMENTWISE_FUNC_VIA_FLOAT_METHOD(reciprocal)
DEFINE_BINARY_COMPARISON_OPERATOR_VIA_FLOAT_METHOD(operator==)
DEFINE_BINARY_COMPARISON_OPERATOR_VIA_FLOAT_METHOD(operator!=)
@ -364,6 +402,7 @@ class Vectorized<c10::BFloat16> : public Vectorized16<
DEFINE_BINARY_COMPARISON_OPERATOR_VIA_FLOAT_METHOD(operator<=)
DEFINE_BINARY_COMPARISON_OPERATOR_VIA_FLOAT_METHOD(operator>)
DEFINE_BINARY_COMPARISON_OPERATOR_VIA_FLOAT_METHOD(operator>=)
#endif
#undef DEFINE_UNARY_ELEMENTWISE_FUNC_VIA_FLOAT_METHOD
#undef DEFINE_BINARY_ELEMENTWISE_FUNC_VIA_FLOAT_METHOD
@ -412,28 +451,52 @@ template <>
Vectorized<c10::BFloat16> inline operator+(
const Vectorized<c10::BFloat16>& a,
const Vectorized<c10::BFloat16>& b) {
#ifdef __ARM_FEATURE_BF16
bfloat16x8_t x = a;
bfloat16x8_t y = b;
return x + y;
#else
return binary_operator_via_float(std::plus<Vectorized<float>>(), a, b);
#endif
}
template <>
Vectorized<c10::BFloat16> inline operator-(
const Vectorized<c10::BFloat16>& a,
const Vectorized<c10::BFloat16>& b) {
#ifdef __ARM_FEATURE_BF16
bfloat16x8_t x = a;
bfloat16x8_t y = b;
return x - y;
#else
return binary_operator_via_float(std::minus<Vectorized<float>>(), a, b);
#endif
}
template <>
Vectorized<c10::BFloat16> inline operator*(
const Vectorized<c10::BFloat16>& a,
const Vectorized<c10::BFloat16>& b) {
#ifdef __ARM_FEATURE_BF16
bfloat16x8_t x = a;
bfloat16x8_t y = b;
return x * y;
#else
return binary_operator_via_float(std::multiplies<Vectorized<float>>(), a, b);
#endif
}
template <>
Vectorized<c10::BFloat16> inline operator/(
const Vectorized<c10::BFloat16>& a,
const Vectorized<c10::BFloat16>& b) {
#ifdef __ARM_FEATURE_BF16
bfloat16x8_t x = a;
bfloat16x8_t y = b;
return x / y;
#else
return binary_operator_via_float(std::divides<Vectorized<float>>(), a, b);
#endif
}
// frac. Implement this here so we can use subtraction
@ -544,12 +607,19 @@ Vectorized<c10::BFloat16> inline fmadd(
const Vectorized<c10::BFloat16>& a,
const Vectorized<c10::BFloat16>& b,
const Vectorized<c10::BFloat16>& c) {
#ifdef __ARM_FEATURE_BF16
bfloat16x8_t x = a;
bfloat16x8_t y = b;
bfloat16x8_t z = c;
return x * y + z;
#else
// NOTE [BF16 FMA]: There isn't an FMA that accumulates into BF16! Also,
// vbfmlalbq_f32 and vbfmlaltq_f32 take the even and odd-numbered
// elements, not the bottom and top half, so they don't seem
// particularly useful here. Ideally we would include dot product in
// the Vectorized interface...
return a * b + c;
#endif
}
template <>
@ -557,8 +627,15 @@ Vectorized<c10::BFloat16> inline fnmadd(
const Vectorized<c10::BFloat16>& a,
const Vectorized<c10::BFloat16>& b,
const Vectorized<c10::BFloat16>& c) {
#ifdef __ARM_FEATURE_BF16
bfloat16x8_t x = a;
bfloat16x8_t y = b;
bfloat16x8_t z = c;
return (-x) * y + z;
#else
// See NOTE [BF16 FMA] above.
return -a * b + c;
#endif
}
template <>
@ -566,8 +643,15 @@ Vectorized<c10::BFloat16> inline fmsub(
const Vectorized<c10::BFloat16>& a,
const Vectorized<c10::BFloat16>& b,
const Vectorized<c10::BFloat16>& c) {
#ifdef __ARM_FEATURE_BF16
bfloat16x8_t x = a;
bfloat16x8_t y = b;
bfloat16x8_t z = c;
return x * y - z;
#else
// See NOTE [BF16 FMA] above.
return a * b - c;
#endif
}
template <>
@ -575,8 +659,15 @@ Vectorized<c10::BFloat16> inline fnmsub(
const Vectorized<c10::BFloat16>& a,
const Vectorized<c10::BFloat16>& b,
const Vectorized<c10::BFloat16>& c) {
#ifdef __ARM_FEATURE_BF16
bfloat16x8_t x = a;
bfloat16x8_t y = b;
bfloat16x8_t z = c;
return (-x) * y - z;
#else
// See NOTE [BF16 FMA] above.
return -a * b - c;
#endif
}
#endif // !defined(C10_MOBILE) && defined(__aarch64__)

View File

@ -5,6 +5,114 @@
namespace at::vec {
inline namespace CPU_CAPABILITY {
#if (defined(__aarch64__) && !defined(CPU_CAPABILITY_SVE256))
// Enable auto-vectorization for GCC-13+ and clang-17+
// GCC-12 has a bug: gcc.gnu.org/bugzilla/show_bug.cgi?id=117001
#if __GNUC__ > 12 || (defined(__clang__) && (__clang_major__ >= 17))
template <typename from_type, typename to_type>
inline void convertImpl(
const from_type* __restrict src,
to_type* __restrict dst,
int64_t n) {
uint64_t len = static_cast<uint64_t>(n);
for (uint64_t i = 0; i < len; i++) {
dst[i] = static_cast<to_type>(src[i]);
}
}
#define CONVERT_TEMPLATE(from_type, to_type) \
template <> \
inline void convert(const from_type* src, to_type* dst, int64_t n) { \
return convertImpl<from_type, to_type>(src, dst, n); \
}
CONVERT_TEMPLATE(uint8_t, uint8_t)
CONVERT_TEMPLATE(uint8_t, int8_t)
CONVERT_TEMPLATE(uint8_t, int16_t)
CONVERT_TEMPLATE(uint8_t, int32_t)
CONVERT_TEMPLATE(uint8_t, int64_t)
CONVERT_TEMPLATE(uint8_t, float)
CONVERT_TEMPLATE(uint8_t, double)
CONVERT_TEMPLATE(int8_t, uint8_t)
CONVERT_TEMPLATE(int8_t, int8_t)
CONVERT_TEMPLATE(int8_t, int16_t)
CONVERT_TEMPLATE(int8_t, int32_t)
CONVERT_TEMPLATE(int8_t, int64_t)
CONVERT_TEMPLATE(int8_t, float)
CONVERT_TEMPLATE(int8_t, double)
CONVERT_TEMPLATE(int16_t, uint8_t)
CONVERT_TEMPLATE(int16_t, int8_t)
CONVERT_TEMPLATE(int16_t, int16_t)
CONVERT_TEMPLATE(int16_t, int32_t)
CONVERT_TEMPLATE(int16_t, int64_t)
CONVERT_TEMPLATE(int16_t, float)
CONVERT_TEMPLATE(int16_t, double)
CONVERT_TEMPLATE(int32_t, uint8_t)
CONVERT_TEMPLATE(int32_t, int8_t)
CONVERT_TEMPLATE(int32_t, int16_t)
CONVERT_TEMPLATE(int32_t, int32_t)
CONVERT_TEMPLATE(int32_t, int64_t)
CONVERT_TEMPLATE(int32_t, float)
CONVERT_TEMPLATE(int32_t, double)
CONVERT_TEMPLATE(int64_t, uint8_t)
CONVERT_TEMPLATE(int64_t, int8_t)
CONVERT_TEMPLATE(int64_t, int16_t)
CONVERT_TEMPLATE(int64_t, int32_t)
CONVERT_TEMPLATE(int64_t, int64_t)
CONVERT_TEMPLATE(int64_t, float)
CONVERT_TEMPLATE(int64_t, double)
CONVERT_TEMPLATE(float, uint8_t)
CONVERT_TEMPLATE(float, int8_t)
CONVERT_TEMPLATE(float, int16_t)
CONVERT_TEMPLATE(float, int32_t)
CONVERT_TEMPLATE(float, int64_t)
CONVERT_TEMPLATE(float, float)
CONVERT_TEMPLATE(float, double)
CONVERT_TEMPLATE(double, uint8_t)
CONVERT_TEMPLATE(double, int8_t)
CONVERT_TEMPLATE(double, int16_t)
CONVERT_TEMPLATE(double, int32_t)
CONVERT_TEMPLATE(double, int64_t)
CONVERT_TEMPLATE(double, float)
CONVERT_TEMPLATE(double, double)
#ifdef __ARM_FEATURE_FP16_VECTOR_ARITHMETIC
CONVERT_TEMPLATE(float16_t, uint8_t)
CONVERT_TEMPLATE(float16_t, int8_t)
CONVERT_TEMPLATE(float16_t, int16_t)
CONVERT_TEMPLATE(float16_t, int32_t)
CONVERT_TEMPLATE(float16_t, int64_t)
CONVERT_TEMPLATE(float16_t, float16_t)
CONVERT_TEMPLATE(float16_t, float)
CONVERT_TEMPLATE(float16_t, double)
CONVERT_TEMPLATE(uint8_t, float16_t)
CONVERT_TEMPLATE(int8_t, float16_t)
CONVERT_TEMPLATE(int16_t, float16_t)
CONVERT_TEMPLATE(int32_t, float16_t)
CONVERT_TEMPLATE(int64_t, float16_t)
CONVERT_TEMPLATE(float, float16_t)
CONVERT_TEMPLATE(double, float16_t)
#endif
#ifdef __ARM_FEATURE_BF16
CONVERT_TEMPLATE(bfloat16_t, uint8_t)
CONVERT_TEMPLATE(bfloat16_t, int8_t)
CONVERT_TEMPLATE(bfloat16_t, int16_t)
CONVERT_TEMPLATE(bfloat16_t, int32_t)
CONVERT_TEMPLATE(bfloat16_t, int64_t)
CONVERT_TEMPLATE(bfloat16_t, bfloat16_t)
CONVERT_TEMPLATE(bfloat16_t, float)
CONVERT_TEMPLATE(bfloat16_t, double)
CONVERT_TEMPLATE(uint8_t, bfloat16_t)
CONVERT_TEMPLATE(int8_t, bfloat16_t)
CONVERT_TEMPLATE(int16_t, bfloat16_t)
CONVERT_TEMPLATE(int32_t, bfloat16_t)
CONVERT_TEMPLATE(int64_t, bfloat16_t)
CONVERT_TEMPLATE(float, bfloat16_t)
CONVERT_TEMPLATE(double, bfloat16_t)
#endif
#endif
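// A small usage sketch (the buffer contents are made up; assumes the surrounding
// ATen vec headers are in scope). Each specialization above is just an
// element-wise static_cast written so that GCC 13+ / clang 17+ can
// auto-vectorize the loop; callers pass raw pointers and a length.
inline void convert_usage_sketch() {
  float src[4] = {0.5f, 1.5f, -2.25f, 3.0f};
  int32_t dst[4] = {};
  convert(src, dst, 4); // dst becomes {0, 1, -2, 3}: plain static_cast truncation
}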
template <typename src_t>
struct VecConvert<
float,

View File

@ -0,0 +1,586 @@
#pragma once
#include <ATen/cpu/vec/intrinsics.h>
#include <ATen/cpu/vec/vec_base.h>
#include <c10/macros/Macros.h>
#include <c10/util/irange.h>
#include <cmath>
namespace at::vec {
// Note [CPU_CAPABILITY namespace]
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// This header, and all of its subheaders, will be compiled with
// different architecture flags for each supported set of vector
// intrinsics. So we need to make sure they aren't inadvertently
// linked together. We do this by declaring objects in an `inline
// namespace` which changes the name mangling, but can still be
// accessed as `at::vec`.
inline namespace CPU_CAPABILITY {
template <>
struct is_vec_specialized_for<double> : std::bool_constant<true> {};
template <>
class Vectorized<double> {
private:
float64x2_t values;
public:
using value_type = double;
using size_type = int;
static constexpr size_type size() {
return 2;
}
Vectorized() {
values = vdupq_n_f64(0.0);
}
Vectorized(float64x2_t v) : values(v) {}
Vectorized(double val) {
values = vdupq_n_f64(val);
}
template <
typename... Args,
typename = std::enable_if_t<(sizeof...(Args) == size())>>
Vectorized(Args... vals) {
__at_align__ double buffer[size()] = {vals...};
values = vld1q_f64(buffer);
}
operator float64x2_t() const {
return values;
}
template <int64_t mask>
static Vectorized<double> blend(
const Vectorized<double>& a,
const Vectorized<double>& b) {
// Build an array of lane masks: each element is all-ones if the corresponding
// bit in 'mask' is set, 0 otherwise.
uint64x2_t maskArray = {
(mask & 1ULL) ? 0xFFFFFFFFFFFFFFFF : 0,
(mask & 2ULL) ? 0xFFFFFFFFFFFFFFFF : 0};
// Use BSL to select elements from b where the mask is 1, else from a
return vbslq_f64(maskArray, b.values, a.values);
}
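// Usage sketch (lane values are made up): the template argument is a
// compile-time bit mask, bit i set meaning "take lane i from b".
//   Vectorized<double> a(1.0), b(2.0);
//   auto r = Vectorized<double>::blend<0b10>(a, b); // lanes {a[0], b[1]} = {1.0, 2.0}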
static Vectorized<double> blendv(
const Vectorized<double>& a,
const Vectorized<double>& b,
const Vectorized<double>& mask_) {
return vbslq_f64(vreinterpretq_u64_f64(mask_.values), b.values, a.values);
}
template <typename step_t>
static Vectorized<double> arange(
double base = 0.,
step_t step = static_cast<step_t>(1)) {
return {base, base + static_cast<double>(step)};
}
static inline Vectorized<double> set(
const Vectorized<double>& a,
const Vectorized<double>& b,
int64_t count = size()) {
if (count == 0) {
return a;
} else if (count >= 2) {
return b;
} else {
float64x2_t c = {b.values[0], a.values[1]};
return c;
}
}
static Vectorized<double> loadu(const void* ptr, int64_t count = size()) {
if (count == size()) {
return vld1q_f64(reinterpret_cast<const double*>(ptr));
} else if (count == 1) {
float64x1_t x = vld1_f64(reinterpret_cast<const double*>(ptr));
float64x1_t z = {0.0};
return vcombine_f64(x, z);
} else {
return vdupq_n_f64(0.0);
}
}
void store(void* ptr, int64_t count = size()) const {
if (count == size()) {
vst1q_f64(reinterpret_cast<double*>(ptr), values);
} else if (count == 1) {
vst1_f64(reinterpret_cast<double*>(ptr), vget_low_f64(values));
}
}
const double& operator[](int idx) const = delete;
double& operator[](int idx) = delete;
int64_t zero_mask() const {
// Returns an integer bitmask in which each lane that equals zero contributes
// a 1 bit and every other lane contributes a 0 bit.
uint64x2_t cmpReg = vceqzq_f64(values);
uint64x2_t mask = {1, 2};
uint64x2_t res = vandq_u64(cmpReg, mask);
return res[0] | res[1];
}
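// Worked example (lane values are made up): for {0.0, 3.5} the comparison gives
// lanes {all-ones, 0}, AND-ing with {1, 2} leaves {1, 0}, and OR-ing the two
// lanes produces the bitmask 0b01, i.e. only lane 0 is zero.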
Vectorized<double> isnan() const {
// NaN check
return vreinterpretq_f64_u32(
vmvnq_u32(vreinterpretq_u32_u64(vceqq_f64(values, values))));
}
bool has_inf_nan() const {
Vectorized<double> x = vsubq_f64(values, values);
float64x2_t r = x.isnan();
uint64x2_t u = vreinterpretq_u64_f64(r);
return u[0] | u[1];
}
Vectorized<double> map(double (*f)(double)) const {
float64x2_t result;
result[0] = f(values[0]);
result[1] = f(values[1]);
return result;
}
Vectorized<double> map2(
const Vectorized<double>& second,
double (*const f)(double, double)) const {
float64x2_t result;
result[0] = f(values[0], second.values[0]);
result[1] = f(values[1], second.values[1]);
return result;
}
Vectorized<double> abs() const {
return vabsq_f64(values);
}
Vectorized<double> angle() const {
auto zero = Vectorized<double>(0.0);
auto pi = Vectorized<double>(c10::pi<double>);
auto tmp = blendv(zero, pi, vreinterpretq_f64_u64(vcltzq_f64(values)));
return blendv(tmp, *this, isnan());
}
Vectorized<double> real() const {
return *this;
}
Vectorized<double> imag() const {
return Vectorized<double>(0.0);
}
Vectorized<double> conj() const {
return *this;
}
Vectorized<double> acos() const {
return USE_SLEEF(
Vectorized<double>(Sleef_acosd2_u10(values)), map(std::acos));
}
Vectorized<double> acosh() const {
return USE_SLEEF(
Vectorized<double>(Sleef_acoshd2_u10(values)), map(std::acosh));
}
Vectorized<double> asin() const {
return USE_SLEEF(
Vectorized<double>(Sleef_asind2_u10(values)), map(std::asin));
}
Vectorized<double> asinh() const {
return USE_SLEEF(
Vectorized<double>(Sleef_asinhd2_u10(values)), map(std::asinh));
}
Vectorized<double> atan() const {
return USE_SLEEF(
Vectorized<double>(Sleef_atand2_u10(values)), map(std::atan));
}
Vectorized<double> atanh() const {
return USE_SLEEF(
Vectorized<double>(Sleef_atanhd2_u10(values)), map(std::atanh));
}
Vectorized<double> atan2(const Vectorized<double>& b) const {USE_SLEEF(
{ return Vectorized<double>(Sleef_atan2d2_u10(values, b)); },
{
__at_align__ double tmp[size()];
__at_align__ double tmp_b[size()];
store(tmp);
b.store(tmp_b);
for (int64_t i = 0; i < size(); i++) {
tmp[i] = std::atan2(tmp[i], tmp_b[i]);
}
return loadu(tmp);
})} Vectorized<double> copysign(const Vectorized<double>& sign) const {
USE_SLEEF(
{ return Vectorized<double>(Sleef_copysignd2(values, sign)); },
{
__at_align__ double tmp[size()];
__at_align__ double tmp_sign[size()];
store(tmp);
sign.store(tmp_sign);
for (int64_t i = 0; i < size(); i++) {
tmp[i] = std::copysign(tmp[i], tmp_sign[i]);
}
return loadu(tmp);
})} Vectorized<double> erf() const {
return USE_SLEEF(
Vectorized<double>(Sleef_erfd2_u10(values)), map(std::erf));
}
Vectorized<double> erfc() const {
return USE_SLEEF(
Vectorized<double>(Sleef_erfcd2_u15(values)), map(std::erfc));
}
Vectorized<double> exp() const {
return USE_SLEEF(
Vectorized<double>(Sleef_expd2_u10(values)), map(std::exp));
}
Vectorized<double> exp2() const {
return USE_SLEEF(
Vectorized<double>(Sleef_exp2d2_u10(values)), map(std::exp2));
}
Vectorized<double> expm1() const {
return USE_SLEEF(
Vectorized<double>(Sleef_expm1d2_u10(values)), map(std::expm1));
}
Vectorized<double> fmod(const Vectorized<double>& q) const {USE_SLEEF(
{ return Vectorized<double>(Sleef_fmodd2(values, q)); },
{
__at_align__ double tmp[size()];
__at_align__ double tmp_q[size()];
store(tmp);
q.store(tmp_q);
for (int64_t i = 0; i < size(); i++) {
tmp[i] = std::fmod(tmp[i], tmp_q[i]);
}
return loadu(tmp);
})} Vectorized<double> hypot(const Vectorized<double>& b) const {
USE_SLEEF(
{ return Vectorized<double>(Sleef_hypotd2_u05(values, b)); },
{
__at_align__ double tmp[size()];
__at_align__ double tmp_b[size()];
store(tmp);
b.store(tmp_b);
for (int64_t i = 0; i < size(); i++) {
tmp[i] = std::hypot(tmp[i], tmp_b[i]);
}
return loadu(tmp);
})} Vectorized<double> i0() const {
return map(calc_i0);
}
Vectorized<double> nextafter(const Vectorized<double>& b) const {USE_SLEEF(
{ return Vectorized<double>(Sleef_nextafterd2(values, b)); },
{
__at_align__ double tmp[size()];
__at_align__ double tmp_b[size()];
store(tmp);
b.store(tmp_b);
for (int64_t i = 0; i < size(); ++i) {
tmp[i] = std::nextafter(tmp[i], tmp_b[i]);
}
return loadu(tmp);
})} Vectorized<double> log() const {
return USE_SLEEF(
Vectorized<double>(Sleef_logd2_u10(values)), map(std::log));
}
Vectorized<double> log2() const {
return USE_SLEEF(
Vectorized<double>(Sleef_log2d2_u10(values)), map(std::log2));
}
Vectorized<double> log10() const {
return USE_SLEEF(
Vectorized<double>(Sleef_log10d2_u10(values)), map(std::log10));
}
Vectorized<double> log1p() const {
return USE_SLEEF(
Vectorized<double>(Sleef_log1pd2_u10(values)), map(std::log1p));
}
Vectorized<double> frac() const;
Vectorized<double> sin() const {
return USE_SLEEF(
Vectorized<double>(Sleef_sind2_u10(values)), map(std::sin));
}
Vectorized<double> sinh() const {
return USE_SLEEF(
Vectorized<double>(Sleef_sinhd2_u10(values)), map(std::sinh));
}
Vectorized<double> cos() const {
return USE_SLEEF(
Vectorized<double>(Sleef_cosd2_u10(values)), map(std::cos));
}
Vectorized<double> cosh() const {
return USE_SLEEF(
Vectorized<double>(Sleef_coshd2_u10(values)), map(std::cosh));
}
Vectorized<double> pow(const Vectorized<double>& b) const {USE_SLEEF(
{ return Vectorized<double>(Sleef_powd2_u10(values, b)); },
{
__at_align__ double tmp[size()];
__at_align__ double tmp_b[size()];
store(tmp);
b.store(tmp_b);
for (int64_t i = 0; i < size(); i++) {
tmp[i] = std::pow(tmp[i], tmp_b[i]);
}
return loadu(tmp);
})} // Comparison using the _CMP_**_OQ predicate.
// `O`: get false if an operand is NaN
// `Q`: do not raise if an operand is NaN
Vectorized<double> tan() const {
return USE_SLEEF(
Vectorized<double>(Sleef_tand2_u10(values)), map(std::tan));
}
Vectorized<double> tanh() const {
return USE_SLEEF(
Vectorized<double>(Sleef_tanhd2_u10(values)), map(std::tanh));
}
Vectorized<double> lgamma() const {
return USE_SLEEF(
Vectorized<double>(Sleef_lgammad2_u10(values)), map(std::lgamma));
}
Vectorized<double> erfinv() const {
return map(calc_erfinv);
}
Vectorized<double> exp_u20() const {
return exp();
}
Vectorized<double> fexp_u20() const {
return exp();
}
Vectorized<double> i0e() const {
return map(calc_i0e);
}
Vectorized<double> digamma() const {
return map(calc_digamma);
}
Vectorized<double> igamma(const Vectorized<double>& x) const {
__at_align__ double tmp[size()];
__at_align__ double tmp_x[size()];
store(tmp);
x.store(tmp_x);
for (int64_t i = 0; i < size(); i++) {
tmp[i] = calc_igamma(tmp[i], tmp_x[i]);
}
return loadu(tmp);
}
Vectorized<double> igammac(const Vectorized<double>& x) const {
__at_align__ double tmp[size()];
__at_align__ double tmp_x[size()];
store(tmp);
x.store(tmp_x);
for (int64_t i = 0; i < size(); i++) {
tmp[i] = calc_igammac(tmp[i], tmp_x[i]);
}
return loadu(tmp);
}
Vectorized<double> ceil() const {
return vrndpq_f64(values);
}
Vectorized<double> floor() const {
return vrndmq_f64(values);
}
Vectorized<double> neg() const {
return vnegq_f64(values);
}
Vectorized<double> round() const {
return vrndiq_f64(values);
}
Vectorized<double> trunc() const {
return vrndq_f64(values);
}
Vectorized<double> sqrt() const {
return vsqrtq_f64(values);
}
Vectorized<double> reciprocal() const {
return vdivq_f64(vdupq_n_f64(1.0), values);
}
Vectorized<double> rsqrt() const {
return vdivq_f64(vdupq_n_f64(1.0), vsqrtq_f64(values));
}
double reduce_add() const {
return vaddvq_f64(values);
}
double reduce_max() const {
return vmaxvq_f64(values);
}
Vectorized<double> operator==(const Vectorized<double>& other) const {
return Vectorized<double>(
vreinterpretq_f64_u64(vceqq_f64(values, other.values)));
}
Vectorized<double> operator!=(const Vectorized<double>& other) const {
float64x2_t r0 = vreinterpretq_f64_u32(
vmvnq_u32(vreinterpretq_u32_u64(vceqq_f64(values, other.values))));
return Vectorized<double>(r0);
}
Vectorized<double> operator<(const Vectorized<double>& other) const {
return Vectorized<double>(
vreinterpretq_f64_u64(vcltq_f64(values, other.values)));
}
Vectorized<double> operator<=(const Vectorized<double>& other) const {
return Vectorized<double>(
vreinterpretq_f64_u64(vcleq_f64(values, other.values)));
}
Vectorized<double> operator>(const Vectorized<double>& other) const {
return Vectorized<double>(
vreinterpretq_f64_u64(vcgtq_f64(values, other.values)));
}
Vectorized<double> operator>=(const Vectorized<double>& other) const {
return Vectorized<double>(
vreinterpretq_f64_u64(vcgeq_f64(values, other.values)));
}
Vectorized<double> eq(const Vectorized<double>& other) const;
Vectorized<double> ne(const Vectorized<double>& other) const;
Vectorized<double> gt(const Vectorized<double>& other) const;
Vectorized<double> ge(const Vectorized<double>& other) const;
Vectorized<double> lt(const Vectorized<double>& other) const;
Vectorized<double> le(const Vectorized<double>& other) const;
};
template <>
Vectorized<double> inline operator+(
const Vectorized<double>& a,
const Vectorized<double>& b) {
return vaddq_f64(a, b);
}
template <>
Vectorized<double> inline operator-(
const Vectorized<double>& a,
const Vectorized<double>& b) {
return vsubq_f64(a, b);
}
template <>
Vectorized<double> inline operator*(
const Vectorized<double>& a,
const Vectorized<double>& b) {
return vmulq_f64(a, b);
}
template <>
Vectorized<double> inline operator/(
const Vectorized<double>& a,
const Vectorized<double>& b) {
return vdivq_f64(a, b);
}
// frac. Implement this here so we can use subtraction
Vectorized<double> inline Vectorized<double>::frac() const {
return *this - this->trunc();
}
// Implements the IEEE 754 201X `maximum` operation, which propagates NaN if
// either input is a NaN.
template <>
Vectorized<double> inline maximum(
const Vectorized<double>& a,
const Vectorized<double>& b) {
return vmaxq_f64(a, b);
}
// Implements the IEEE 754 201X `minimum` operation, which propagates NaN if
// either input is a NaN.
template <>
Vectorized<double> inline minimum(
const Vectorized<double>& a,
const Vectorized<double>& b) {
return vminq_f64(a, b);
}
template <>
Vectorized<double> inline clamp(
const Vectorized<double>& a,
const Vectorized<double>& min,
const Vectorized<double>& max) {
return vminq_f64(max, vmaxq_f64(min, a));
}
template <>
Vectorized<double> inline clamp_max(
const Vectorized<double>& a,
const Vectorized<double>& max) {
return vminq_f64(max, a);
}
template <>
Vectorized<double> inline clamp_min(
const Vectorized<double>& a,
const Vectorized<double>& min) {
return vmaxq_f64(min, a);
}
template <>
Vectorized<double> inline operator&(
const Vectorized<double>& a,
const Vectorized<double>& b) {
return vreinterpretq_f64_u64(
vandq_u64(vreinterpretq_u64_f64(a), vreinterpretq_u64_f64(b)));
}
template <>
Vectorized<double> inline operator|(
const Vectorized<double>& a,
const Vectorized<double>& b) {
return vreinterpretq_f64_u64(
vorrq_u64(vreinterpretq_u64_f64(a), vreinterpretq_u64_f64(b)));
}
template <>
Vectorized<double> inline operator^(
const Vectorized<double>& a,
const Vectorized<double>& b) {
return vreinterpretq_f64_u64(
veorq_u64(vreinterpretq_u64_f64(a), vreinterpretq_u64_f64(b)));
}
inline Vectorized<double> Vectorized<double>::eq(
const Vectorized<double>& other) const {
return (*this == other) & Vectorized<double>(1.0);
}
inline Vectorized<double> Vectorized<double>::ne(
const Vectorized<double>& other) const {
return (*this != other) & Vectorized<double>(1.0);
}
inline Vectorized<double> Vectorized<double>::gt(
const Vectorized<double>& other) const {
return (*this > other) & Vectorized<double>(1.0);
}
inline Vectorized<double> Vectorized<double>::ge(
const Vectorized<double>& other) const {
return (*this >= other) & Vectorized<double>(1.0);
}
inline Vectorized<double> Vectorized<double>::lt(
const Vectorized<double>& other) const {
return (*this < other) & Vectorized<double>(1.0);
}
inline Vectorized<double> Vectorized<double>::le(
const Vectorized<double>& other) const {
return (*this <= other) & Vectorized<double>(1.0);
}
template <>
Vectorized<double> inline fmadd(
const Vectorized<double>& a,
const Vectorized<double>& b,
const Vectorized<double>& c) {
return vfmaq_f64(c, a, b);
}
template <>
Vectorized<double> inline fnmadd(
const Vectorized<double>& a,
const Vectorized<double>& b,
const Vectorized<double>& c) {
return vfmsq_f64(c, a, b);
}
template <>
Vectorized<double> inline fmsub(
const Vectorized<double>& a,
const Vectorized<double>& b,
const Vectorized<double>& c) {
return vfmaq_f64(vnegq_f64(c), a, b);
}
template <>
Vectorized<double> inline fnmsub(
const Vectorized<double>& a,
const Vectorized<double>& b,
const Vectorized<double>& c) {
return vfmsq_f64(vnegq_f64(c), a, b);
}
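// Summary of the sign conventions implemented above (derived from the
// intrinsics: vfmaq_f64(c, a, b) = c + a*b and vfmsq_f64(c, a, b) = c - a*b):
//   fmadd(a, b, c)  =  a*b + c
//   fnmadd(a, b, c) = -(a*b) + c
//   fmsub(a, b, c)  =  a*b - c
//   fnmsub(a, b, c) = -(a*b) - c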
} // namespace CPU_CAPABILITY
} // namespace at::vec

View File

@ -307,11 +307,49 @@ class Vectorized<float> {
DEFINE_SLEEF_COMPATIBLE_UNARY_ELEMENTWISE_FUNC(exp)
DEFINE_SLEEF_COMPATIBLE_UNARY_ELEMENTWISE_FUNC(exp2)
DEFINE_SLEEF_COMPATIBLE_UNARY_ELEMENTWISE_FUNC(expm1)
// Implementation copied from Arm Optimized Routines:
// https://github.com/ARM-software/optimized-routines/blob/master/math/aarch64/advsimd/expf.c
Vectorized<float> exp_u20() const {
return exp();
// Bail out to exp() (Sleef when available) if there is a special case,
// i.e. at least one input with |input| > 87.3...
const float32x4_t special_bound = vdupq_n_f32(0x1.5d5e2ap+6f);
uint32x4_t cmp = vcagtq_f32(values, special_bound);
if (vpaddd_u64(vreinterpretq_u64_u32(cmp)) != 0) {
return exp();
}
const float32x4_t inv_ln2 = vdupq_n_f32(0x1.715476p+0f);
const float ln2_hi = 0x1.62e4p-1f;
const float ln2_lo = 0x1.7f7d1cp-20f;
const float c0 = 0x1.0e4020p-7f;
const float c2 = 0x1.555e66p-3f;
const float32x4_t ln2_c02 = {ln2_hi, ln2_lo, c0, c2};
const uint32x4_t exponent_bias = vdupq_n_u32(0x3f800000);
const float32x4_t c1 = vdupq_n_f32(0x1.573e2ep-5f);
const float32x4_t c3 = vdupq_n_f32(0x1.fffdb6p-2f);
const float32x4_t c4 = vdupq_n_f32(0x1.ffffecp-1f);
/* exp(x) = 2^n (1 + poly(r)), with 1 + poly(r) in [1/sqrt(2),sqrt(2)]
x = ln2*n + r, with r in [-ln2/2, ln2/2]. */
float32x4_t n = vrndaq_f32(vmulq_f32(values, inv_ln2));
float32x4_t r = vfmsq_laneq_f32(values, n, ln2_c02, 0);
r = vfmsq_laneq_f32(r, n, ln2_c02, 1);
uint32x4_t e = vshlq_n_u32(vreinterpretq_u32_s32(vcvtq_s32_f32(n)), 23);
float32x4_t scale = vreinterpretq_f32_u32(vaddq_u32(e, exponent_bias));
float32x4_t r2 = vmulq_f32(r, r);
float32x4_t p = vfmaq_laneq_f32(c1, r, ln2_c02, 2);
float32x4_t q = vfmaq_laneq_f32(c3, r, ln2_c02, 3);
q = vfmaq_f32(q, p, r2);
p = vmulq_f32(c4, r);
float32x4_t poly = vfmaq_f32(p, q, r2);
return vfmaq_f32(scale, poly, scale);
}
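// Sketch of the scale computation above (illustrative only): for an integer n,
// adding the exponent bias and shifting into the exponent field builds 2^n
// directly, e.g. n = 3 gives ((3 << 23) + 0x3f800000), which reinterpreted as a
// float is 8.0f. The vector code does exactly this per lane via vshlq_n_u32 and
// the exponent_bias constant.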
Vectorized<float> fexp_u20() const {
return exp();
return exp_u20();
}
DEFINE_SLEEF_COMPATIBLE_BINARY_ELEMENTWISE_FUNC_WITH_SLEEF_NAME(
fmod,
@ -540,42 +578,6 @@ inline Vectorized<float> Vectorized<float>::le(
return (*this <= other) & Vectorized<float>(1.0f);
}
template <>
inline void convert(const float* src, int32_t* dst, int64_t n) {
int64_t i;
#ifndef __msvc_cl__
#pragma unroll
#endif
for (i = 0; i <= (n - Vectorized<float>::size());
i += Vectorized<float>::size()) {
vst1q_s32(dst + i, vcvtq_s32_f32(vld1q_f32(src + i)));
}
#ifndef __msvc_cl__
#pragma unroll
#endif
for (; i < n; i++) {
dst[i] = static_cast<int32_t>(src[i]);
}
}
template <>
inline void convert(const int32_t* src, float* dst, int64_t n) {
int64_t i;
#ifndef __msvc_cl__
#pragma unroll
#endif
for (i = 0; i <= (n - Vectorized<float>::size());
i += Vectorized<float>::size()) {
vst1q_f32(dst + i, vcvtq_f32_s32(vld1q_s32(src + i)));
}
#ifndef __msvc_cl__
#pragma unroll
#endif
for (; i < n; i++) {
dst[i] = static_cast<float>(src[i]);
}
}
template <>
Vectorized<float> inline fmadd(
const Vectorized<float>& a,

View File

@ -569,46 +569,6 @@ inline Vectorized<c10::Half> Vectorized<c10::Half>::le(
return (*this <= other) & Vectorized<c10::Half>(1);
}
// These are global functions, so the defaults in vec_base.h should
// work fine if __ARM_FEATURE_FP16_VECTOR_ARITHMETIC is not available.
#ifdef __ARM_FEATURE_FP16_VECTOR_ARITHMETIC
template <>
inline void convert(const float16_t* src, int16_t* dst, int64_t n) {
int64_t i;
#ifndef __msvc_cl__
#pragma unroll
#endif
for (i = 0; i <= (n - Vectorized<c10::Half>::size());
i += Vectorized<c10::Half>::size()) {
vst1q_s16(dst + i, vcvtq_s16_f16(vld1q_f16(src + i)));
}
#ifndef __msvc_cl__
#pragma unroll
#endif
for (; i < n; i++) {
dst[i] = static_cast<int16_t>(src[i]);
}
}
template <>
inline void convert(const int16_t* src, float16_t* dst, int64_t n) {
int64_t i;
#ifndef __msvc_cl__
#pragma unroll
#endif
for (i = 0; i <= (n - Vectorized<c10::Half>::size());
i += Vectorized<c10::Half>::size()) {
vst1q_f16(dst + i, vcvtq_f16_s16(vld1q_s16(src + i)));
}
#ifndef __msvc_cl__
#pragma unroll
#endif
for (; i < n; i++) {
dst[i] = static_cast<float16_t>(src[i]);
}
}
#endif // __ARM_FEATURE_FP16_VECTOR_ARITHMETIC
template <>
Vectorized<c10::Half> inline fmadd(
const Vectorized<c10::Half>& a,

View File

@ -0,0 +1,794 @@
#pragma once
#include <ATen/cpu/vec/intrinsics.h>
#include <ATen/cpu/vec/vec_base.h>
#include <c10/macros/Macros.h>
#include <c10/util/irange.h>
namespace at::vec {
// Note [CPU_CAPABILITY namespace]
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// This header, and all of its subheaders, will be compiled with
// different architecture flags for each supported set of vector
// intrinsics. So we need to make sure they aren't inadvertently
// linked together. We do this by declaring objects in an `inline
// namespace` which changes the name mangling, but can still be
// accessed as `at::vec`.
inline namespace CPU_CAPABILITY {
#define VEC_INT_NEON_TEMPLATE(vl, bit) \
template <> \
struct is_vec_specialized_for<int##bit##_t> : std::bool_constant<true> {}; \
\
template <> \
class Vectorized<int##bit##_t> { \
using neon_type = int##bit##x##vl##_t; \
\
private: \
neon_type values; \
\
public: \
using value_type = int##bit##_t; \
using size_type = int; \
static constexpr size_type size() { \
return vl; \
} \
Vectorized() { \
values = vdupq_n_s##bit(0); \
} \
Vectorized(neon_type v) : values(v) {} \
Vectorized(int##bit##_t val); \
template < \
typename... Args, \
typename = std::enable_if_t<(sizeof...(Args) == size())>> \
Vectorized(Args... vals) { \
__at_align__ int##bit##_t buffer[size()] = {vals...}; \
values = vld1q_s##bit(buffer); \
} \
operator neon_type() const { \
return values; \
} \
static Vectorized<int##bit##_t> loadu( \
const void* ptr, \
int64_t count = size()); \
void store(void* ptr, int64_t count = size()) const; \
template <int64_t mask> \
static Vectorized<int##bit##_t> blend( \
const Vectorized<int##bit##_t>& a, \
const Vectorized<int##bit##_t>& b); \
static Vectorized<int##bit##_t> blendv( \
const Vectorized<int##bit##_t>& a, \
const Vectorized<int##bit##_t>& b, \
const Vectorized<int##bit##_t>& mask_) { \
return vbslq_s##bit(vreinterpretq_u##bit##_s##bit(mask_.values), b, a); \
} \
template <typename step_t> \
static Vectorized<int##bit##_t> arange( \
value_type base = 0, \
step_t step = static_cast<step_t>(1)); \
static Vectorized<int##bit##_t> set( \
const Vectorized<int##bit##_t>& a, \
const Vectorized<int##bit##_t>& b, \
int64_t count = size()); \
const int##bit##_t& operator[](int idx) const = delete; \
int##bit##_t& operator[](int idx) = delete; \
Vectorized<int##bit##_t> abs() const { \
return vabsq_s##bit(values); \
} \
Vectorized<int##bit##_t> real() const { \
return values; \
} \
Vectorized<int##bit##_t> imag() const { \
return vdupq_n_s##bit(0); \
} \
Vectorized<int##bit##_t> conj() const { \
return values; \
} \
Vectorized<int##bit##_t> neg() const { \
return vnegq_s##bit(values); \
} \
int##bit##_t reduce_add() const { \
return vaddvq_s##bit(values); \
} \
int##bit##_t reduce_max() const; \
Vectorized<int##bit##_t> operator==( \
const Vectorized<int##bit##_t>& other) const { \
return Vectorized<value_type>( \
vreinterpretq_s##bit##_u##bit(vceqq_s##bit(values, other.values))); \
} \
Vectorized<int##bit##_t> operator!=( \
const Vectorized<int##bit##_t>& other) const; \
Vectorized<int##bit##_t> operator<( \
const Vectorized<int##bit##_t>& other) const { \
return Vectorized<value_type>( \
vreinterpretq_s##bit##_u##bit(vcltq_s##bit(values, other.values))); \
} \
Vectorized<int##bit##_t> operator<=( \
const Vectorized<int##bit##_t>& other) const { \
return Vectorized<value_type>( \
vreinterpretq_s##bit##_u##bit(vcleq_s##bit(values, other.values))); \
} \
Vectorized<int##bit##_t> operator>( \
const Vectorized<int##bit##_t>& other) const { \
return Vectorized<value_type>( \
vreinterpretq_s##bit##_u##bit(vcgtq_s##bit(values, other.values))); \
} \
Vectorized<int##bit##_t> operator>=( \
const Vectorized<int##bit##_t>& other) const { \
return Vectorized<value_type>( \
vreinterpretq_s##bit##_u##bit(vcgeq_s##bit(values, other.values))); \
} \
Vectorized<int##bit##_t> eq(const Vectorized<int##bit##_t>& other) const; \
Vectorized<int##bit##_t> ne(const Vectorized<int##bit##_t>& other) const; \
Vectorized<int##bit##_t> gt(const Vectorized<int##bit##_t>& other) const; \
Vectorized<int##bit##_t> ge(const Vectorized<int##bit##_t>& other) const; \
Vectorized<int##bit##_t> lt(const Vectorized<int##bit##_t>& other) const; \
Vectorized<int##bit##_t> le(const Vectorized<int##bit##_t>& other) const; \
}; \
template <> \
Vectorized<int##bit##_t> inline operator+( \
const Vectorized<int##bit##_t>& a, const Vectorized<int##bit##_t>& b) { \
return vaddq_s##bit(a, b); \
} \
template <> \
Vectorized<int##bit##_t> inline operator-( \
const Vectorized<int##bit##_t>& a, const Vectorized<int##bit##_t>& b) { \
return vsubq_s##bit(a, b); \
} \
template <> \
Vectorized<int##bit##_t> inline operator&( \
const Vectorized<int##bit##_t>& a, const Vectorized<int##bit##_t>& b) { \
return vandq_s##bit(a, b); \
} \
template <> \
Vectorized<int##bit##_t> inline operator|( \
const Vectorized<int##bit##_t>& a, const Vectorized<int##bit##_t>& b) { \
return vorrq_s##bit(a, b); \
} \
template <> \
Vectorized<int##bit##_t> inline operator^( \
const Vectorized<int##bit##_t>& a, const Vectorized<int##bit##_t>& b) { \
return veorq_s##bit(a, b); \
} \
Vectorized<int##bit##_t> inline Vectorized<int##bit##_t>::eq( \
const Vectorized<int##bit##_t>& other) const { \
return (*this == other) & Vectorized<int##bit##_t>(1); \
} \
Vectorized<int##bit##_t> inline Vectorized<int##bit##_t>::ne( \
const Vectorized<int##bit##_t>& other) const { \
return (*this != other) & Vectorized<int##bit##_t>(1); \
} \
Vectorized<int##bit##_t> inline Vectorized<int##bit##_t>::gt( \
const Vectorized<int##bit##_t>& other) const { \
return (*this > other) & Vectorized<int##bit##_t>(1); \
} \
Vectorized<int##bit##_t> inline Vectorized<int##bit##_t>::ge( \
const Vectorized<int##bit##_t>& other) const { \
return (*this >= other) & Vectorized<int##bit##_t>(1); \
} \
Vectorized<int##bit##_t> inline Vectorized<int##bit##_t>::lt( \
const Vectorized<int##bit##_t>& other) const { \
return (*this < other) & Vectorized<int##bit##_t>(1); \
} \
Vectorized<int##bit##_t> inline Vectorized<int##bit##_t>::le( \
const Vectorized<int##bit##_t>& other) const { \
return (*this <= other) & Vectorized<int##bit##_t>(1); \
}
VEC_INT_NEON_TEMPLATE(2, 64)
VEC_INT_NEON_TEMPLATE(4, 32)
VEC_INT_NEON_TEMPLATE(8, 16)
VEC_INT_NEON_TEMPLATE(16, 8)
inline int32_t Vectorized<int32_t>::reduce_max() const {
return vmaxvq_s32(values);
}
inline int16_t Vectorized<int16_t>::reduce_max() const {
return vmaxvq_s16(values);
}
inline int8_t Vectorized<int8_t>::reduce_max() const {
return vmaxvq_s8(values);
}
template <>
Vectorized<int32_t> inline operator*(
const Vectorized<int32_t>& a,
const Vectorized<int32_t>& b) {
return vmulq_s32(a, b);
}
template <>
Vectorized<int16_t> inline operator*(
const Vectorized<int16_t>& a,
const Vectorized<int16_t>& b) {
return vmulq_s16(a, b);
}
template <>
Vectorized<int8_t> inline operator*(
const Vectorized<int8_t>& a,
const Vectorized<int8_t>& b) {
return vmulq_s8(a, b);
}
template <>
inline Vectorized<int64_t> operator~(const Vectorized<int64_t>& a) {
int64x2_t val = a;
return ~val;
}
template <>
inline Vectorized<int32_t> operator~(const Vectorized<int32_t>& a) {
return vmvnq_s32(a);
}
template <>
inline Vectorized<int16_t> operator~(const Vectorized<int16_t>& a) {
return vmvnq_s16(a);
}
template <>
inline Vectorized<int8_t> operator~(const Vectorized<int8_t>& a) {
return vmvnq_s8(a);
}
inline Vectorized<int64_t> Vectorized<int64_t>::operator!=(
const Vectorized<int64_t>& other) const {
return ~(*this == other);
}
inline Vectorized<int32_t> Vectorized<int32_t>::operator!=(
const Vectorized<int32_t>& other) const {
return ~(*this == other);
}
inline Vectorized<int16_t> Vectorized<int16_t>::operator!=(
const Vectorized<int16_t>& other) const {
return ~(*this == other);
}
inline Vectorized<int8_t> Vectorized<int8_t>::operator!=(
const Vectorized<int8_t>& other) const {
return ~(*this == other);
}
template <>
Vectorized<int32_t> inline minimum(
const Vectorized<int32_t>& a,
const Vectorized<int32_t>& b) {
return vminq_s32(a, b);
}
template <>
Vectorized<int16_t> inline minimum(
const Vectorized<int16_t>& a,
const Vectorized<int16_t>& b) {
return vminq_s16(a, b);
}
template <>
Vectorized<int8_t> inline minimum(
const Vectorized<int8_t>& a,
const Vectorized<int8_t>& b) {
return vminq_s8(a, b);
}
template <>
Vectorized<int32_t> inline maximum(
const Vectorized<int32_t>& a,
const Vectorized<int32_t>& b) {
return vmaxq_s32(a, b);
}
template <>
Vectorized<int16_t> inline maximum(
const Vectorized<int16_t>& a,
const Vectorized<int16_t>& b) {
return vmaxq_s16(a, b);
}
template <>
Vectorized<int8_t> inline maximum(
const Vectorized<int8_t>& a,
const Vectorized<int8_t>& b) {
return vmaxq_s8(a, b);
}
template <int64_t mask>
Vectorized<int64_t> Vectorized<int64_t>::blend(
const Vectorized<int64_t>& a,
const Vectorized<int64_t>& b) {
// Build an array of lane masks: each element is all-ones if the corresponding
// bit in 'mask' is set, 0 otherwise.
uint64x2_t maskArray = {
(mask & 1LL) ? 0xFFFFFFFFFFFFFFFF : 0,
(mask & 2LL) ? 0xFFFFFFFFFFFFFFFF : 0};
// Use BSL to select elements from b where the mask is 1, else from a
return vbslq_s64(maskArray, b.values, a.values);
}
template <int64_t mask>
Vectorized<int32_t> Vectorized<int32_t>::blend(
const Vectorized<int32_t>& a,
const Vectorized<int32_t>& b) {
// Build an array of lane masks: each element is all-ones if the corresponding
// bit in 'mask' is set, 0 otherwise.
uint32x4_t maskArray = {
(mask & 1LL) ? 0xFFFFFFFF : 0,
(mask & 2LL) ? 0xFFFFFFFF : 0,
(mask & 4LL) ? 0xFFFFFFFF : 0,
(mask & 8LL) ? 0xFFFFFFFF : 0};
// Use BSL to select elements from b where the mask is 1, else from a
return vbslq_s32(maskArray, b.values, a.values);
}
template <int64_t mask>
Vectorized<int16_t> Vectorized<int16_t>::blend(
const Vectorized<int16_t>& a,
const Vectorized<int16_t>& b) {
// Build an array of lane masks: each element is all-ones if the corresponding
// bit in 'mask' is set, 0 otherwise.
uint16x8_t maskArray = {
(mask & 1LL) ? 0xFFFF : 0,
(mask & 2LL) ? 0xFFFF : 0,
(mask & 4LL) ? 0xFFFF : 0,
(mask & 8LL) ? 0xFFFF : 0,
(mask & 16LL) ? 0xFFFF : 0,
(mask & 32LL) ? 0xFFFF : 0,
(mask & 64LL) ? 0xFFFF : 0,
(mask & 128LL) ? 0xFFFF : 0};
// Use BSL to select elements from b where the mask is 1, else from a
return vbslq_s16(maskArray, b.values, a.values);
}
template <int64_t mask>
Vectorized<int8_t> Vectorized<int8_t>::blend(
const Vectorized<int8_t>& a,
const Vectorized<int8_t>& b) {
// Build an array of lane masks: each element is all-ones if the corresponding
// bit in 'mask' is set, 0 otherwise.
uint8x16_t maskArray = {
(mask & 1LL) ? 0xFF : 0,
(mask & 2LL) ? 0xFF : 0,
(mask & 4LL) ? 0xFF : 0,
(mask & 8LL) ? 0xFF : 0,
(mask & 16LL) ? 0xFF : 0,
(mask & 32LL) ? 0xFF : 0,
(mask & 64LL) ? 0xFF : 0,
(mask & 128LL) ? 0xFF : 0,
(mask & 256LL) ? 0xFF : 0,
(mask & 512LL) ? 0xFF : 0,
(mask & 1024LL) ? 0xFF : 0,
(mask & 2048LL) ? 0xFF : 0,
(mask & 4096LL) ? 0xFF : 0,
(mask & 8192LL) ? 0xFF : 0,
(mask & 16384LL) ? 0xFF : 0,
(mask & 32768LL) ? 0xFF : 0};
// Use BSL to select elements from b where the mask is 1, else from a
return vbslq_s8(maskArray, b.values, a.values);
}
#define VEC_INT_NEON_OPS(vl, bit) \
inline Vectorized<int##bit##_t>::Vectorized(int##bit##_t val) { \
values = vdupq_n_s##bit(val); \
} \
inline Vectorized<int##bit##_t> Vectorized<int##bit##_t>::loadu( \
const void* ptr, int64_t count) { \
if (count == size()) { \
return vld1q_s##bit(reinterpret_cast<const int##bit##_t*>(ptr)); \
} else { \
__at_align__ int##bit##_t tmp_values[size()]; \
for (const auto i : c10::irange(size())) { \
tmp_values[i] = 0; \
} \
std::memcpy( \
tmp_values, \
reinterpret_cast<const int##bit##_t*>(ptr), \
count * sizeof(int##bit##_t)); \
return vld1q_s##bit(reinterpret_cast<const int##bit##_t*>(tmp_values)); \
} \
} \
inline void Vectorized<int##bit##_t>::store(void* ptr, int64_t count) \
const { \
if (count == size()) { \
vst1q_s##bit(reinterpret_cast<int##bit##_t*>(ptr), values); \
} else { \
int##bit##_t tmp_values[size()]; \
vst1q_s##bit(reinterpret_cast<int##bit##_t*>(tmp_values), values); \
std::memcpy(ptr, tmp_values, count * sizeof(int##bit##_t)); \
} \
}
VEC_INT_NEON_OPS(2, 64)
VEC_INT_NEON_OPS(4, 32)
VEC_INT_NEON_OPS(8, 16)
VEC_INT_NEON_OPS(16, 8)
template <>
Vectorized<int64_t> inline operator*(
const Vectorized<int64_t>& a,
const Vectorized<int64_t>& b) {
int64x2_t x = a;
int64x2_t y = b;
return x * y;
}
template <>
Vectorized<int64_t> inline operator/(
const Vectorized<int64_t>& a,
const Vectorized<int64_t>& b) {
int64x2_t x = a;
int64x2_t y = b;
return x / y;
}
template <>
Vectorized<int32_t> inline operator/(
const Vectorized<int32_t>& a,
const Vectorized<int32_t>& b) {
int32x4_t x = a;
int32x4_t y = b;
return x / y;
}
inline int64_t Vectorized<int64_t>::reduce_max() const {
return std::max(values[0], values[1]);
}
template <>
Vectorized<int64_t> inline minimum(
const Vectorized<int64_t>& a,
const Vectorized<int64_t>& b) {
int64x2_t x = a;
int64x2_t y = b;
return {std::min(x[0], y[0]), std::min(x[1], y[1])};
}
template <>
Vectorized<int64_t> inline maximum(
const Vectorized<int64_t>& a,
const Vectorized<int64_t>& b) {
int64x2_t x = a;
int64x2_t y = b;
return {std::max(x[0], y[0]), std::max(x[1], y[1])};
}
template <typename step_t>
inline Vectorized<int64_t> Vectorized<int64_t>::arange(
int64_t base,
step_t step) {
const Vectorized<int64_t> base_vec(base);
const Vectorized<int64_t> step_vec(step);
const int64x2_t step_sizes = {0, 1};
return base_vec.values + step_sizes * step_vec.values;
}
template <typename step_t>
inline Vectorized<int32_t> Vectorized<int32_t>::arange(
int32_t base,
step_t step) {
const Vectorized<int32_t> base_vec(base);
const Vectorized<int32_t> step_vec(step);
const int32x4_t step_sizes = {0, 1, 2, 3};
return vmlaq_s32(base_vec, step_sizes, step_vec);
}
template <typename step_t>
inline Vectorized<int16_t> Vectorized<int16_t>::arange(
int16_t base,
step_t step) {
const Vectorized<int16_t> base_vec(base);
const Vectorized<int16_t> step_vec(step);
const int16x8_t step_sizes = {0, 1, 2, 3, 4, 5, 6, 7};
return vmlaq_s16(base_vec, step_sizes, step_vec);
}
template <typename step_t>
inline Vectorized<int8_t> Vectorized<int8_t>::arange(int8_t base, step_t step) {
const Vectorized<int8_t> base_vec(base);
const Vectorized<int8_t> step_vec(step);
const int8x16_t step_sizes = {
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15};
return vmlaq_s8(base_vec, step_sizes, step_vec);
}
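// A small usage sketch (values are made up; assumes the surrounding header is in
// scope): arange(base, step) fills the lanes with base, base + step,
// base + 2*step, ..., so for the 4-lane int32 case:
inline Vectorized<int32_t> arange_usage_sketch() {
  return Vectorized<int32_t>::arange(10, 3); // lanes {10, 13, 16, 19}
}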
template <>
Vectorized<int64_t> inline operator>>(
const Vectorized<int64_t>& a,
const Vectorized<int64_t>& b) {
int64x2_t x = a;
int64x2_t y = b;
uint64x2_t u = vreinterpretq_u64_s64(y);
uint64x2_t z = {std::min(u[0], (uint64_t)63), std::min(u[1], (uint64_t)63)};
return x >> vreinterpretq_s64_u64(z);
}
template <>
Vectorized<int32_t> inline operator>>(
const Vectorized<int32_t>& a,
const Vectorized<int32_t>& b) {
int32x4_t x = a;
int32x4_t y = b;
uint32x4_t bound = vdupq_n_u32(31);
uint32x4_t z = vminq_u32(vreinterpretq_u32_s32(y), bound);
return x >> vreinterpretq_s32_u32(z);
}
template <>
Vectorized<int16_t> inline operator>>(
const Vectorized<int16_t>& a,
const Vectorized<int16_t>& b) {
int16x8_t x = a;
int16x8_t y = b;
uint16x8_t bound = vdupq_n_u16(15);
uint16x8_t z = vminq_u16(vreinterpretq_u16_s16(y), bound);
return x >> vreinterpretq_s16_u16(z);
}
template <>
Vectorized<int8_t> inline operator>>(
const Vectorized<int8_t>& a,
const Vectorized<int8_t>& b) {
int8x16_t x = a;
int8x16_t y = b;
uint8x16_t bound = vdupq_n_u8(7);
int8x16_t z = vreinterpretq_s8_u8(vminq_u8(vreinterpretq_u8_s8(y), bound));
return x >> z;
}
template <>
Vectorized<int64_t> inline operator<<(
const Vectorized<int64_t>& a,
const Vectorized<int64_t>& b) {
int64x2_t y = b;
uint64x2_t u = vreinterpretq_u64_s64(y);
uint64x2_t z = {std::min(u[0], (uint64_t)64), std::min(u[1], (uint64_t)64)};
return vshlq_s64(a, vreinterpretq_s64_u64(z));
}
template <>
Vectorized<int32_t> inline operator<<(
const Vectorized<int32_t>& a,
const Vectorized<int32_t>& b) {
int32x4_t y = b;
uint32x4_t bound = vdupq_n_u32(32);
uint32x4_t z = vminq_u32(vreinterpretq_u32_s32(y), bound);
return vshlq_s32(a, vreinterpretq_s32_u32(z));
}
template <>
Vectorized<int16_t> inline operator<<(
const Vectorized<int16_t>& a,
const Vectorized<int16_t>& b) {
int16x8_t y = b;
uint16x8_t bound = vdupq_n_u16(16);
uint16x8_t z = vminq_u16(vreinterpretq_u16_s16(y), bound);
return vshlq_s16(a, vreinterpretq_s16_u16(z));
}
template <>
Vectorized<int8_t> inline operator<<(
const Vectorized<int8_t>& a,
const Vectorized<int8_t>& b) {
int8x16_t y = b;
uint8x16_t bound = vdupq_n_u8(8);
int8x16_t z = vreinterpretq_s8_u8(vminq_u8(vreinterpretq_u8_s8(y), bound));
return vshlq_s8(a, z);
}
inline Vectorized<int64_t> Vectorized<int64_t>::set(
const Vectorized<int64_t>& a,
const Vectorized<int64_t>& b,
int64_t count) {
if (count == 0) {
return a;
} else if (count >= 2) {
return b;
} else {
int64x2_t c = {b.values[0], a.values[1]};
return c;
}
}
inline Vectorized<int32_t> Vectorized<int32_t>::set(
const Vectorized<int32_t>& a,
const Vectorized<int32_t>& b,
int64_t count) {
if (count == 0) {
return a;
} else if (count >= 4) {
return b;
} else {
// Build an array of lane masks: each element is all-ones if its lane index is
// less than 'count', 0 otherwise.
uint32x4_t maskArray = {
(count >= 1LL) ? 0xFFFFFFFF : 0,
(count >= 2LL) ? 0xFFFFFFFF : 0,
(count >= 3LL) ? 0xFFFFFFFF : 0,
0};
// Use BSL to select elements from b where the mask is 1, else from a
return vbslq_s32(maskArray, b.values, a.values);
}
}
inline Vectorized<int16_t> Vectorized<int16_t>::set(
const Vectorized<int16_t>& a,
const Vectorized<int16_t>& b,
int64_t count) {
if (count == 0) {
return a;
} else if (count >= 8) {
return b;
} else {
// Build an array of lane masks: each element is all-ones if its lane index is
// less than 'count', 0 otherwise.
uint16x8_t maskArray = {
static_cast<uint16_t>((count >= 1LL) ? 0xFFFF : 0),
static_cast<uint16_t>((count >= 2LL) ? 0xFFFF : 0),
static_cast<uint16_t>((count >= 3LL) ? 0xFFFF : 0),
static_cast<uint16_t>((count >= 4LL) ? 0xFFFF : 0),
static_cast<uint16_t>((count >= 5LL) ? 0xFFFF : 0),
static_cast<uint16_t>((count >= 6LL) ? 0xFFFF : 0),
static_cast<uint16_t>((count >= 7LL) ? 0xFFFF : 0),
0};
// Use BSL to select elements from b where the mask is 1, else from a
return vbslq_s16(maskArray, b.values, a.values);
}
}
inline Vectorized<int8_t> Vectorized<int8_t>::set(
const Vectorized<int8_t>& a,
const Vectorized<int8_t>& b,
int64_t count) {
if (count == 0) {
return a;
} else if (count >= 16) {
return b;
} else {
// Build an array of lane masks: each element is all-ones if its lane index is
// less than 'count', 0 otherwise.
uint8x16_t maskArray = {
static_cast<uint8_t>((count >= 1LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 2LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 3LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 4LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 5LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 6LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 7LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 8LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 9LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 10LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 11LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 12LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 13LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 14LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 15LL) ? 0xFF : 0),
0};
// Use BSL to select elements from b where the mask is 1, else from a
return vbslq_s8(maskArray, b.values, a.values);
}
}
template <>
Vectorized<int16_t> inline operator/(
const Vectorized<int16_t>& a,
const Vectorized<int16_t>& b) {
Vectorized<int32_t> highBitsA = vmovl_high_s16(a);
Vectorized<int32_t> highBitsB = vmovl_high_s16(b);
Vectorized<int32_t> lowBitsA = vmovl_s16(vget_low_s16(a));
Vectorized<int32_t> lowBitsB = vmovl_s16(vget_low_s16(b));
int32x4_t highBitsResult = highBitsA / highBitsB;
int32x4_t lowBitsResult = lowBitsA / lowBitsB;
return vuzp1q_s16(
vreinterpretq_s16_s32(lowBitsResult),
vreinterpretq_s16_s32(highBitsResult));
}
template <>
Vectorized<int8_t> inline operator/(
const Vectorized<int8_t>& a,
const Vectorized<int8_t>& b) {
Vectorized<int16_t> highBitsA = vmovl_high_s8(a);
Vectorized<int16_t> highBitsB = vmovl_high_s8(b);
Vectorized<int16_t> lowBitsA = vmovl_s8(vget_low_s8(a));
Vectorized<int16_t> lowBitsB = vmovl_s8(vget_low_s8(b));
int16x8_t highBitsResult = highBitsA / highBitsB;
int16x8_t lowBitsResult = lowBitsA / lowBitsB;
return vuzp1q_s8(
vreinterpretq_s8_s16(lowBitsResult),
vreinterpretq_s8_s16(highBitsResult));
}
template <>
Vectorized<int64_t> inline clamp(
const Vectorized<int64_t>& a,
const Vectorized<int64_t>& min,
const Vectorized<int64_t>& max) {
return minimum(max, maximum(min, a));
}
template <>
Vectorized<int32_t> inline clamp(
const Vectorized<int32_t>& a,
const Vectorized<int32_t>& min,
const Vectorized<int32_t>& max) {
return minimum(max, maximum(min, a));
}
template <>
Vectorized<int16_t> inline clamp(
const Vectorized<int16_t>& a,
const Vectorized<int16_t>& min,
const Vectorized<int16_t>& max) {
return minimum(max, maximum(min, a));
}
template <>
Vectorized<int8_t> inline clamp(
const Vectorized<int8_t>& a,
const Vectorized<int8_t>& min,
const Vectorized<int8_t>& max) {
return minimum(max, maximum(min, a));
}
template <>
Vectorized<int64_t> inline clamp_max(
const Vectorized<int64_t>& a,
const Vectorized<int64_t>& max) {
return minimum(max, a);
}
template <>
Vectorized<int32_t> inline clamp_max(
const Vectorized<int32_t>& a,
const Vectorized<int32_t>& max) {
return minimum(max, a);
}
template <>
Vectorized<int16_t> inline clamp_max(
const Vectorized<int16_t>& a,
const Vectorized<int16_t>& max) {
return minimum(max, a);
}
template <>
Vectorized<int8_t> inline clamp_max(
const Vectorized<int8_t>& a,
const Vectorized<int8_t>& max) {
return minimum(max, a);
}
template <>
Vectorized<int64_t> inline clamp_min(
const Vectorized<int64_t>& a,
const Vectorized<int64_t>& min) {
return maximum(min, a);
}
template <>
Vectorized<int32_t> inline clamp_min(
const Vectorized<int32_t>& a,
const Vectorized<int32_t>& min) {
return maximum(min, a);
}
template <>
Vectorized<int16_t> inline clamp_min(
const Vectorized<int16_t>& a,
const Vectorized<int16_t>& min) {
return maximum(min, a);
}
template <>
Vectorized<int8_t> inline clamp_min(
const Vectorized<int8_t>& a,
const Vectorized<int8_t>& min) {
return maximum(min, a);
}
} // namespace CPU_CAPABILITY
} // namespace at::vec

View File

@ -0,0 +1,378 @@
#pragma once
#include <ATen/cpu/vec/intrinsics.h>
#include <ATen/cpu/vec/vec_base.h>
#include <c10/macros/Macros.h>
#include <c10/util/irange.h>
namespace at::vec {
// Note [CPU_CAPABILITY namespace]
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// This header, and all of its subheaders, will be compiled with
// different architecture flags for each supported set of vector
// intrinsics. So we need to make sure they aren't inadvertently
// linked together. We do this by declaring objects in an `inline
// namespace` which changes the name mangling, but can still be
// accessed as `at::vec`.
inline namespace CPU_CAPABILITY {
#define VEC_UINT_NEON_TEMPLATE(vl, bit) \
template <> \
struct is_vec_specialized_for<uint##bit##_t> : std::bool_constant<true> {}; \
\
template <> \
class Vectorized<uint##bit##_t> { \
using neon_type = uint##bit##x##vl##_t; \
\
private: \
neon_type values; \
\
public: \
using value_type = uint##bit##_t; \
using size_type = int; \
static constexpr size_type size() { \
return vl; \
} \
Vectorized() { \
values = vdupq_n_u##bit(0); \
} \
Vectorized(neon_type v) : values(v) {} \
Vectorized(uint##bit##_t val); \
template < \
typename... Args, \
typename = std::enable_if_t<(sizeof...(Args) == size())>> \
Vectorized(Args... vals) { \
__at_align__ uint##bit##_t buffer[size()] = {vals...}; \
values = vld1q_u##bit(buffer); \
} \
operator neon_type() const { \
return values; \
} \
static Vectorized<uint##bit##_t> loadu( \
const void* ptr, \
uint64_t count = size()); \
void store(void* ptr, uint64_t count = size()) const; \
template <uint64_t mask> \
static Vectorized<uint##bit##_t> blend( \
const Vectorized<uint##bit##_t>& a, \
const Vectorized<uint##bit##_t>& b); \
static Vectorized<uint##bit##_t> blendv( \
const Vectorized<uint##bit##_t>& a, \
const Vectorized<uint##bit##_t>& b, \
const Vectorized<uint##bit##_t>& mask_) { \
return vbslq_u##bit(mask_.values, b, a); \
} \
template <typename step_t> \
static Vectorized<uint##bit##_t> arange( \
value_type base = 0, \
step_t step = static_cast<step_t>(1)); \
static Vectorized<uint##bit##_t> set( \
const Vectorized<uint##bit##_t>& a, \
const Vectorized<uint##bit##_t>& b, \
uint64_t count = size()); \
const uint##bit##_t& operator[](uint idx) const = delete; \
uint##bit##_t& operator[](uint idx) = delete; \
Vectorized<uint##bit##_t> abs() const { \
return values; \
} \
Vectorized<uint##bit##_t> real() const { \
return values; \
} \
Vectorized<uint##bit##_t> imag() const { \
return vdupq_n_u##bit(0); \
} \
Vectorized<uint##bit##_t> conj() const { \
return values; \
} \
Vectorized<uint##bit##_t> neg() const { \
return vreinterpretq_u##bit##_s##bit( \
vnegq_s##bit(vreinterpretq_s##bit##_u##bit(values))); \
} \
uint##bit##_t reduce_add() const { \
return vaddvq_u##bit(values); \
} \
uint##bit##_t reduce_max() const; \
Vectorized<uint##bit##_t> operator==( \
const Vectorized<uint##bit##_t>& other) const { \
return Vectorized<value_type>(vceqq_u##bit(values, other.values)); \
} \
Vectorized<uint##bit##_t> operator!=( \
const Vectorized<uint##bit##_t>& other) const; \
Vectorized<uint##bit##_t> operator<( \
const Vectorized<uint##bit##_t>& other) const { \
return Vectorized<value_type>(vcltq_u##bit(values, other.values)); \
} \
Vectorized<uint##bit##_t> operator<=( \
const Vectorized<uint##bit##_t>& other) const { \
return Vectorized<value_type>(vcleq_u##bit(values, other.values)); \
} \
Vectorized<uint##bit##_t> operator>( \
const Vectorized<uint##bit##_t>& other) const { \
return Vectorized<value_type>(vcgtq_u##bit(values, other.values)); \
} \
Vectorized<uint##bit##_t> operator>=( \
const Vectorized<uint##bit##_t>& other) const { \
return Vectorized<value_type>(vcgeq_u##bit(values, other.values)); \
} \
Vectorized<uint##bit##_t> eq( \
const Vectorized<uint##bit##_t>& other) const; \
Vectorized<uint##bit##_t> ne( \
const Vectorized<uint##bit##_t>& other) const; \
Vectorized<uint##bit##_t> gt( \
const Vectorized<uint##bit##_t>& other) const; \
Vectorized<uint##bit##_t> ge( \
const Vectorized<uint##bit##_t>& other) const; \
Vectorized<uint##bit##_t> lt( \
const Vectorized<uint##bit##_t>& other) const; \
Vectorized<uint##bit##_t> le( \
const Vectorized<uint##bit##_t>& other) const; \
}; \
template <> \
Vectorized<uint##bit##_t> inline operator+( \
const Vectorized<uint##bit##_t>& a, \
const Vectorized<uint##bit##_t>& b) { \
return vaddq_u##bit(a, b); \
} \
template <> \
Vectorized<uint##bit##_t> inline operator-( \
const Vectorized<uint##bit##_t>& a, \
const Vectorized<uint##bit##_t>& b) { \
return vsubq_u##bit(a, b); \
} \
template <> \
Vectorized<uint##bit##_t> inline operator&( \
const Vectorized<uint##bit##_t>& a, \
const Vectorized<uint##bit##_t>& b) { \
return vandq_u##bit(a, b); \
} \
template <> \
Vectorized<uint##bit##_t> inline operator|( \
const Vectorized<uint##bit##_t>& a, \
const Vectorized<uint##bit##_t>& b) { \
return vorrq_u##bit(a, b); \
} \
template <> \
Vectorized<uint##bit##_t> inline operator^( \
const Vectorized<uint##bit##_t>& a, \
const Vectorized<uint##bit##_t>& b) { \
return veorq_u##bit(a, b); \
} \
Vectorized<uint##bit##_t> inline Vectorized<uint##bit##_t>::eq( \
const Vectorized<uint##bit##_t>& other) const { \
return (*this == other) & Vectorized<uint##bit##_t>(1); \
} \
Vectorized<uint##bit##_t> inline Vectorized<uint##bit##_t>::ne( \
const Vectorized<uint##bit##_t>& other) const { \
return (*this != other) & Vectorized<uint##bit##_t>(1); \
} \
Vectorized<uint##bit##_t> inline Vectorized<uint##bit##_t>::gt( \
const Vectorized<uint##bit##_t>& other) const { \
return (*this > other) & Vectorized<uint##bit##_t>(1); \
} \
Vectorized<uint##bit##_t> inline Vectorized<uint##bit##_t>::ge( \
const Vectorized<uint##bit##_t>& other) const { \
return (*this >= other) & Vectorized<uint##bit##_t>(1); \
} \
Vectorized<uint##bit##_t> inline Vectorized<uint##bit##_t>::lt( \
const Vectorized<uint##bit##_t>& other) const { \
return (*this < other) & Vectorized<uint##bit##_t>(1); \
} \
Vectorized<uint##bit##_t> inline Vectorized<uint##bit##_t>::le( \
const Vectorized<uint##bit##_t>& other) const { \
return (*this <= other) & Vectorized<uint##bit##_t>(1); \
}
VEC_UINT_NEON_TEMPLATE(16, 8)
inline uint8_t Vectorized<uint8_t>::reduce_max() const {
return vmaxvq_u8(values);
}
template <>
Vectorized<uint8_t> inline operator*(
const Vectorized<uint8_t>& a,
const Vectorized<uint8_t>& b) {
return vmulq_u8(a, b);
}
template <>
inline Vectorized<uint8_t> operator~(const Vectorized<uint8_t>& a) {
return vmvnq_u8(a);
}
inline Vectorized<uint8_t> Vectorized<uint8_t>::operator!=(
const Vectorized<uint8_t>& other) const {
return ~(*this == other);
}
template <>
Vectorized<uint8_t> inline minimum(
const Vectorized<uint8_t>& a,
const Vectorized<uint8_t>& b) {
return vminq_u8(a, b);
}
template <>
Vectorized<uint8_t> inline maximum(
const Vectorized<uint8_t>& a,
const Vectorized<uint8_t>& b) {
return vmaxq_u8(a, b);
}
template <uint64_t mask>
Vectorized<uint8_t> Vectorized<uint8_t>::blend(
const Vectorized<uint8_t>& a,
const Vectorized<uint8_t>& b) {
// Build an array of flags: each element is 0xFF if the corresponding bit
// in 'mask' is set, 0 otherwise.
uint8x16_t maskArray = {
(mask & 1LL) ? 0xFF : 0,
(mask & 2LL) ? 0xFF : 0,
(mask & 4LL) ? 0xFF : 0,
(mask & 8LL) ? 0xFF : 0,
(mask & 16LL) ? 0xFF : 0,
(mask & 32LL) ? 0xFF : 0,
(mask & 64LL) ? 0xFF : 0,
(mask & 128LL) ? 0xFF : 0,
(mask & 256LL) ? 0xFF : 0,
(mask & 512LL) ? 0xFF : 0,
(mask & 1024LL) ? 0xFF : 0,
(mask & 2048LL) ? 0xFF : 0,
(mask & 4096LL) ? 0xFF : 0,
(mask & 8192LL) ? 0xFF : 0,
(mask & 16384LL) ? 0xFF : 0,
(mask & 32768LL) ? 0xFF : 0};
// Use BSL to select elements from b where the mask is 1, else from a
return vbslq_u8(maskArray, b.values, a.values);
}
#define VEC_UINT_NEON_OPS(vl, bit) \
inline Vectorized<uint##bit##_t>::Vectorized(uint##bit##_t val) { \
values = vdupq_n_u##bit(val); \
} \
inline Vectorized<uint##bit##_t> Vectorized<uint##bit##_t>::loadu( \
const void* ptr, uint64_t count) { \
if (count == size()) { \
return vld1q_u##bit(reinterpret_cast<const uint##bit##_t*>(ptr)); \
} else { \
__at_align__ uint##bit##_t tmp_values[size()]; \
for (const auto i : c10::irange(size())) { \
tmp_values[i] = 0; \
} \
std::memcpy( \
tmp_values, \
reinterpret_cast<const uint##bit##_t*>(ptr), \
count * sizeof(uint##bit##_t)); \
return vld1q_u##bit(reinterpret_cast<const uint##bit##_t*>(tmp_values)); \
} \
} \
inline void Vectorized<uint##bit##_t>::store(void* ptr, uint64_t count) \
const { \
if (count == size()) { \
vst1q_u##bit(reinterpret_cast<uint##bit##_t*>(ptr), values); \
} else { \
uint##bit##_t tmp_values[size()]; \
vst1q_u##bit(reinterpret_cast<uint##bit##_t*>(tmp_values), values); \
std::memcpy(ptr, tmp_values, count * sizeof(uint##bit##_t)); \
} \
}
VEC_UINT_NEON_OPS(16, 8)
template <typename step_t>
inline Vectorized<uint8_t> Vectorized<uint8_t>::arange(
uint8_t base,
step_t step) {
const Vectorized<uint8_t> base_vec(base);
const Vectorized<uint8_t> step_vec(step);
const uint8x16_t step_sizes = {
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15};
return vmlaq_u8(base_vec, step_sizes, step_vec);
}
template <>
Vectorized<uint8_t> inline operator>>(
const Vectorized<uint8_t>& a,
const Vectorized<uint8_t>& b) {
uint8x16_t x = a;
uint8x16_t bound = vdupq_n_u8(8);
uint8x16_t z = vminq_u8(b, bound);
return x >> z;
}
template <>
Vectorized<uint8_t> inline operator<<(
const Vectorized<uint8_t>& a,
const Vectorized<uint8_t>& b) {
uint8x16_t bound = vdupq_n_u8(8);
uint8x16_t z = vminq_u8(b, bound);
return vshlq_u8(a, vreinterpretq_s8_u8(z));
}
inline Vectorized<uint8_t> Vectorized<uint8_t>::set(
const Vectorized<uint8_t>& a,
const Vectorized<uint8_t>& b,
uint64_t count) {
if (count == 0) {
return a;
} else if (count >= 16) {
return b;
} else {
// Build an array of flags: each element is 0xFF if its index is below
// 'count', 0 otherwise.
uint8x16_t maskArray = {
static_cast<uint8_t>((count >= 1LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 2LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 3LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 4LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 5LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 6LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 7LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 8LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 9LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 10LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 11LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 12LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 13LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 14LL) ? 0xFF : 0),
static_cast<uint8_t>((count >= 15LL) ? 0xFF : 0),
0};
// Use BSL to select elements from b where the mask is 1, else from a
return vbslq_u8(maskArray, b.values, a.values);
}
}
template <>
Vectorized<uint8_t> inline operator/(
const Vectorized<uint8_t>& a,
const Vectorized<uint8_t>& b) {
uint8x16_t x = a;
uint8x16_t y = b;
return x / y;
}
template <>
Vectorized<uint8_t> inline clamp(
const Vectorized<uint8_t>& a,
const Vectorized<uint8_t>& min,
const Vectorized<uint8_t>& max) {
return minimum(max, maximum(min, a));
}
template <>
Vectorized<uint8_t> inline clamp_max(
const Vectorized<uint8_t>& a,
const Vectorized<uint8_t>& max) {
return minimum(max, a);
}
template <>
Vectorized<uint8_t> inline clamp_min(
const Vectorized<uint8_t>& a,
const Vectorized<uint8_t>& min) {
return maximum(min, a);
}
} // namespace CPU_CAPABILITY
} // namespace at::vec
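Editorial note (illustration only, not part of the diff): a minimal usage sketch for the new Vectorized<uint8_t> specialization, assuming an AArch64 build where this header is selected through the usual <ATen/cpu/vec/vec.h> umbrella include:

#include <ATen/cpu/vec/vec.h>
#include <cstdint>
#include <cstdio>

int main() {
  using Vec = at::vec::Vectorized<uint8_t>;
  uint8_t in[Vec::size()];
  for (int i = 0; i < Vec::size(); ++i) {
    in[i] = static_cast<uint8_t>(i);
  }
  Vec a = Vec::loadu(in);           // full 16-lane load
  Vec b = Vec::arange(100, 2);      // 100, 102, 104, ...
  // blend<mask>: lane i comes from b when bit i of the compile-time mask is
  // set, otherwise from a.
  Vec c = Vec::blend<0x00FF>(a, b); // lanes 0-7 from b, lanes 8-15 from a
  uint8_t out[Vec::size()];
  c.store(out);
  for (int i = 0; i < Vec::size(); ++i) {
    std::printf("%d ", static_cast<int>(out[i]));
  }
  std::printf("\n");
  return 0;
}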

View File

@ -1377,7 +1377,7 @@ Vectorized<c10::quint8> inline maximum(
#if (defined(__aarch64__) && !defined(CPU_CAPABILITY_SVE256))
std::pair<Vectorized<float>, Vectorized<float>> inline convert_int8_to_float(
at::vec::Vectorized<int8_t> src) {
auto s8x8 = vld1_s8(src.operator const int8_t*());
auto s8x8 = vget_low_s8(src);
auto s16x8 = vmovl_s8(s8x8);
auto s32x4_hi = vmovl_s16(vget_high_s16(s16x8));
@ -1390,7 +1390,7 @@ std::pair<Vectorized<float>, Vectorized<float>> inline convert_int8_to_float(
std::pair<Vectorized<float>, Vectorized<float>> inline convert_int8_to_float(
at::vec::Vectorized<uint8_t> src) {
auto u8x8 = vld1_u8(src.operator const uint8_t*());
auto u8x8 = vget_low_u8(src);
auto u16x8 = vmovl_u8(u8x8);
auto u32x4_hi = vmovl_u16(vget_high_u16(u16x8));
auto u32x4_lo = vmovl_u16(vget_low_u16(u16x8));
@ -1402,7 +1402,7 @@ std::pair<Vectorized<float>, Vectorized<float>> inline convert_int8_to_float(
Vectorized<float> inline convert_int8_half_register_to_float(
at::vec::Vectorized<int8_t> src) {
auto s8x8 = vld1_s8(src.operator const int8_t*());
auto s8x8 = vget_low_s8(src);
auto s16x8 = vmovl_s8(s8x8);
auto s32x4_lo = vmovl_s16(vget_low_s16(s16x8));
@ -1412,7 +1412,7 @@ Vectorized<float> inline convert_int8_half_register_to_float(
Vectorized<float> inline convert_int8_half_register_to_float(
at::vec::Vectorized<uint8_t> src) {
auto u8x8 = vld1_u8(src.operator const uint8_t*());
auto u8x8 = vget_low_u8(src);
auto u16x8 = vmovl_u8(u8x8);
auto u32x4_lo = vmovl_u16(vget_low_u16(u16x8));

View File

@ -16,6 +16,8 @@
#include <c10/util/irange.h>
#include <c10/core/ScalarType.h>
#include <ATen/cuda/detail/BLASConstants.h>
#ifdef USE_ROCM
#include <c10/cuda/CUDAStream.h>
#include <hipblaslt/hipblaslt-ext.hpp>
@ -1954,13 +1956,15 @@ void scaled_gemm(
const void *result_scale_ptr,
int64_t result_ld,
ScalarType result_dtype,
bool use_fast_accum) {
bool use_fast_accum,
const std::optional<Tensor>& alpha) {
// Note: see `cublasCommonArgs` for various non-intuitive manipulations
// of input arguments to this function.
const auto computeType = CUBLAS_COMPUTE_32F;
const auto scaleType = CUDA_R_32F;
const float alpha_val = 1.0;
const float beta_val = 0.0;
// Note: alpha_val may change later depending on user-passed argument
float alpha_val = 1.0;
float beta_val = 0.0;
CuBlasLtMatmulDescriptor computeDesc(computeType, scaleType);
computeDesc.setAttribute(CUBLASLT_MATMUL_DESC_TRANSA, _cublasOpFromChar(transa));
computeDesc.setAttribute(CUBLASLT_MATMUL_DESC_TRANSB, _cublasOpFromChar(transb));
@ -2031,6 +2035,33 @@ void scaled_gemm(
computeDesc.setAttribute(CUBLASLT_MATMUL_DESC_EPILOGUE, CUBLASLT_EPILOGUE_BIAS);
computeDesc.setAttribute(CUBLASLT_MATMUL_DESC_BIAS_DATA_TYPE, ScalarTypeToCudaDataType(bias_dtype));
}
// Handle user-passed alpha
float *alpha_ptr = &alpha_val;
float *beta_ptr = &beta_val;
if (alpha.has_value()) {
auto& a = alpha.value();
// if device-tensor
if (a.is_cuda()) {
// NOTE: there are lifetime requirements on device-side pointers for alpha/beta -- the value must
// remain valid & correct until the cublas call finishes executing (not merely until it is
// scheduled, as with host-side values). Thus we need allocations for alpha/beta with lifetime
// guarantees: a statically managed 4-byte device buffer that we copy the passed alpha value
// into, and constant memory for beta, respectively.
float *user_alpha_ptr = at::cuda::detail::get_user_alpha_ptr();
at::Tensor user_alpha = at::from_blob(user_alpha_ptr, {1}, TensorOptions().device(kCUDA).dtype(kFloat));
user_alpha.copy_(a);
// Tell cublasLt we're using device-side pointers for alpha/beta
auto pointer_mode = CUBLASLT_POINTER_MODE_DEVICE;
computeDesc.setAttribute(CUBLASLT_MATMUL_DESC_POINTER_MODE, pointer_mode);
alpha_ptr = user_alpha.data_ptr<float>();
beta_ptr = at::cuda::detail::get_cublas_device_zero();
} else {
alpha_val = a.item<float>();
}
}
// For other data types, use the get_scale_mode function based on scaling type
// The SCALE_MODE attrs only exist in cuBLAS 12.8+/ROCm 7.0 or in recent hipblaslt,
// but we must invoke get_scale_mode anyways to trigger the version checks.
@ -2048,6 +2079,7 @@ void scaled_gemm(
cublasLtMatmulHeuristicResult_t heuristicResult = {};
int returnedResult = 0;
cublasLtHandle_t ltHandle = at::cuda::getCurrentCUDABlasLtHandle();
TORCH_CUDABLAS_CHECK(cublasLtMatmulAlgoGetHeuristic(
ltHandle,
computeDesc.descriptor(),
@ -2088,10 +2120,10 @@ void scaled_gemm(
auto is_valid_status = hipblaslt_ext::matmulIsAlgoSupported(
ltHandle,
computeDesc.descriptor(),
&alpha_val,
alpha_ptr,
Adesc.descriptor(),
Bdesc.descriptor(),
&beta_val,
beta_ptr,
Cdesc.descriptor(),
Ddesc.descriptor(),
all_algos[i].algo,
@ -2110,17 +2142,14 @@ void scaled_gemm(
cublasStatus_t cublasStatus = cublasLtMatmul(
ltHandle,
computeDesc.descriptor(),
&alpha_val,
alpha_ptr,
mat1_ptr,
Adesc.descriptor(),
mat2_ptr,
Bdesc.descriptor(),
&beta_val,
#ifdef USE_ROCM
beta_ptr,
// NOTE: always use result_ptr here, because cuBLASLt w/device beta=0 can't handle nullptr either
result_ptr, // unused, since beta_val is 0, but hipblaslt can't handle nullptr
#else
nullptr,
#endif // ifdef USE_ROCM
Cdesc.descriptor(),
result_ptr,
Ddesc.descriptor(),

View File

@ -161,7 +161,8 @@ void scaled_gemm(
const void* result_scale_ptr,
int64_t result_ld,
ScalarType result_dtype,
bool use_fast_accum);
bool use_fast_accum,
const std::optional<Tensor>& alpha);
#define CUDABLAS_BGEMM_ARGTYPES(Dtype) CUDABLAS_BGEMM_ARGTYPES_AND_C_DTYPE(Dtype, Dtype)

View File

@ -325,9 +325,9 @@ uint64_t CUDAGeneratorImpl::seed() {
*/
c10::intrusive_ptr<c10::TensorImpl> CUDAGeneratorImpl::get_state() const {
// The RNG state comprises the seed, and an offset used for Philox.
static const size_t seed_size = sizeof(uint64_t);
static const size_t offset_size = sizeof(int64_t);
static const size_t total_size = seed_size + offset_size;
constexpr size_t seed_size = sizeof(uint64_t);
constexpr size_t offset_size = sizeof(int64_t);
constexpr size_t total_size = seed_size + offset_size;
auto state_tensor = at::detail::empty_cpu({(int64_t)total_size}, ScalarType::Byte, std::nullopt, std::nullopt, std::nullopt, std::nullopt);
auto rng_state = state_tensor.data_ptr<uint8_t>();
@ -346,9 +346,9 @@ c10::intrusive_ptr<c10::TensorImpl> CUDAGeneratorImpl::get_state() const {
* and size of the internal state.
*/
void CUDAGeneratorImpl::set_state(const c10::TensorImpl& new_state) {
static const size_t seed_size = sizeof(uint64_t);
static const size_t offset_size = sizeof(int64_t);
static const size_t total_size = seed_size + offset_size;
constexpr size_t seed_size = sizeof(uint64_t);
constexpr size_t offset_size = sizeof(int64_t);
constexpr size_t total_size = seed_size + offset_size;
detail::check_rng_state(new_state);

View File

@ -0,0 +1,192 @@
#include <ATen/cuda/CUDAGreenContext.h>
namespace at::cuda {
GreenContext::GreenContext(uint32_t device_id, uint32_t num_sms) {
#if CUDA_HAS_GREEN_CONTEXT
int driver_version;
C10_CUDA_CHECK(cudaDriverGetVersion(&driver_version));
TORCH_CHECK(
driver_version >= 12080, "cuda driver too old to use green context!");
CUcontext pctx = nullptr;
C10_CUDA_DRIVER_CHECK(c10::cuda::DriverAPI::get()->cuCtxGetCurrent_(&pctx));
if (C10_UNLIKELY(!pctx)) {
TORCH_WARN(
"Attempted to create a green context but"
" there was no primary context! Creating a primary context...");
cudaFree(0);
}
CUdevice device;
device_id_ = device_id;
C10_CUDA_DRIVER_CHECK(
c10::cuda::DriverAPI::get()->cuDeviceGet_(&device, device_id));
// Get device resources
CUdevResource device_resource;
C10_CUDA_DRIVER_CHECK(c10::cuda::DriverAPI::get()->cuDeviceGetDevResource_(
device, &device_resource, CU_DEV_RESOURCE_TYPE_SM));
// Split resources
std::vector<CUdevResource> result(1);
auto result_data = result.data();
unsigned int nb_groups = 1;
CUdevResource remaining;
C10_CUDA_DRIVER_CHECK(
c10::cuda::DriverAPI::get()->cuDevSmResourceSplitByCount_(
result_data,
&nb_groups,
&device_resource,
&remaining,
0, // default flags
num_sms));
TORCH_CHECK(nb_groups == 1, "Failed to create single resource group");
// Generate resource descriptor
CUdevResourceDesc desc;
C10_CUDA_DRIVER_CHECK(
c10::cuda::DriverAPI::get()->cuDevResourceGenerateDesc_(
&desc, result_data, 1));
// Create green context
// CU_GREEN_CTX_DEFAULT_STREAM is required per docs:
// https://docs.nvidia.com/cuda/cuda-driver-api/group__CUDA__GREEN__CONTEXTS.html
C10_CUDA_DRIVER_CHECK(c10::cuda::DriverAPI::get()->cuGreenCtxCreate_(
&green_ctx_, desc, device, CU_GREEN_CTX_DEFAULT_STREAM));
// Convert to regular context
C10_CUDA_DRIVER_CHECK(
c10::cuda::DriverAPI::get()->cuCtxFromGreenCtx_(&context_, green_ctx_));
TORCH_CHECK(context_, "Green ctx conversion to regular ctx failed!");
#else
TORCH_CHECK(false, "Green Context is only supported on CUDA 12.8+!");
#endif
}
std::unique_ptr<GreenContext> GreenContext::create(
uint32_t num_sms,
std::optional<uint32_t> device_id) {
#if CUDA_HAS_GREEN_CONTEXT
if (!device_id.has_value()) {
device_id = at::cuda::current_device();
}
return std::make_unique<GreenContext>(device_id.value(), num_sms);
#else
TORCH_CHECK(false, "Green Context is only supported on CUDA 12.8+!");
#endif
}
// Implement move operations
GreenContext::GreenContext(GreenContext&& other) noexcept{
#if CUDA_HAS_GREEN_CONTEXT
device_id_ = std::exchange(other.device_id_, -1);
green_ctx_ = std::exchange(other.green_ctx_, nullptr);
context_ = std::exchange(other.context_, nullptr);
parent_stream_ = std::exchange(other.parent_stream_, nullptr);
#else
TORCH_CHECK(false, "Green Context is only supported on CUDA 12.8+!");
#endif
}
GreenContext& GreenContext::operator=(GreenContext&& other) noexcept{
#if CUDA_HAS_GREEN_CONTEXT
if (this != &other) {
// Clean up current resources
if (green_ctx_) {
CUcontext current = nullptr;
C10_CUDA_DRIVER_CHECK(
c10::cuda::DriverAPI::get()->cuCtxGetCurrent_(&current));
if (current == context_) {
TORCH_CHECK(
false,
"attempting to overwrite current green ctx "
"when it is active!");
}
C10_CUDA_DRIVER_CHECK(c10::cuda::DriverAPI::get()->cuGreenCtxDestroy_(green_ctx_));
}
// Take ownership of other's resources
device_id_ = std::exchange(other.device_id_, -1);
green_ctx_ = std::exchange(other.green_ctx_, nullptr);
context_ = std::exchange(other.context_, nullptr);
parent_stream_ = std::exchange(other.parent_stream_, nullptr);
}
return *this;
#else
TORCH_CHECK(false, "Green Context is only supported on CUDA 12.8+!");
#endif
}
GreenContext::~GreenContext() noexcept{
#if CUDA_HAS_GREEN_CONTEXT
C10_CUDA_DRIVER_CHECK(
c10::cuda::DriverAPI::get()->cuGreenCtxDestroy_(green_ctx_));
#else
TORCH_CHECK(false, "Green Context is only supported on CUDA 12.8+!");
#endif
}
// Get the underlying CUDA context
CUcontext GreenContext::getContext() const {
#if CUDA_HAS_GREEN_CONTEXT
return context_;
#else
TORCH_CHECK(false, "Green Context is only supported on CUDA 12.8+!");
#endif
}
// Get the underlying green context
#if CUDA_HAS_GREEN_CONTEXT
CUgreenCtx GreenContext::getGreenContext() const {
return green_ctx_;
}
#endif
// Make this context current
void GreenContext::setContext() {
#if CUDA_HAS_GREEN_CONTEXT
auto current_stream = c10::cuda::getCurrentCUDAStream();
parent_stream_ = current_stream.stream();
at::cuda::CUDAEvent ev;
ev.record(current_stream);
CUcontext current = nullptr;
C10_CUDA_DRIVER_CHECK(
c10::cuda::DriverAPI::get()->cuCtxGetCurrent_(&current));
if (!current) {
C10_CUDA_DRIVER_CHECK(
c10::cuda::DriverAPI::get()->cuCtxSetCurrent_(context_));
} else {
C10_CUDA_DRIVER_CHECK(
c10::cuda::DriverAPI::get()->cuCtxPushCurrent_(context_));
}
// currently hardcodes the new green context to use the default stream
// TODO(eqy): consider creating a new stream if e.g., it allows interop
// with CUDA Graph captures etc.
auto default_stream = c10::cuda::getDefaultCUDAStream();
ev.block(default_stream);
c10::cuda::setCurrentCUDAStream(default_stream);
#else
TORCH_CHECK(false, "Green Context is only supported on CUDA 12.8+!");
#endif
}
void GreenContext::popContext() {
#if CUDA_HAS_GREEN_CONTEXT
// see above note about stream being hardcoded to the default stream
at::cuda::CUDAEvent ev;
ev.record(c10::cuda::getCurrentCUDAStream());
CUcontext popped;
C10_CUDA_DRIVER_CHECK(
c10::cuda::DriverAPI::get()->cuCtxPopCurrent_(&popped));
TORCH_INTERNAL_ASSERT(
popped == context_, "expected popped context to be the current ctx");
ev.block(c10::cuda::getStreamFromExternal(parent_stream_, device_id_));
#else
TORCH_CHECK(false, "Green Context is only supported on CUDA 12.8+!");
#endif
}
} // namespace at::cuda

View File

@ -0,0 +1,53 @@
#pragma once
#include <ATen/cuda/CUDAEvent.h>
#if defined(CUDA_VERSION) && !defined(USE_ROCM) && defined(PYTORCH_C10_DRIVER_API_SUPPORTED)
#include <c10/cuda/driver_api.h>
#include <cuda.h>
#include <memory>
#include <stdexcept>
#include <vector>
#define CUDA_HAS_GREEN_CONTEXT 1
#else
#define CUDA_HAS_GREEN_CONTEXT 0
#endif
namespace at::cuda {
class TORCH_CUDA_CPP_API GreenContext {
public:
GreenContext(uint32_t device_id, uint32_t num_sms);
static std::unique_ptr<GreenContext> create(uint32_t num_sms, std::optional<uint32_t> device_id);
// Delete copy constructor and assignment
GreenContext(const GreenContext&) = delete;
GreenContext& operator=(const GreenContext&) = delete;
// Implement move operations
GreenContext(GreenContext&& other) noexcept;
GreenContext& operator=(GreenContext&& other) noexcept;
~GreenContext() noexcept;
// Get the underlying CUDA context
CUcontext getContext() const;
// Get the underlying green context
#if CUDA_HAS_GREEN_CONTEXT
CUgreenCtx getGreenContext() const;
#endif
// Make this context current
void setContext();
void popContext();
private:
#if CUDA_HAS_GREEN_CONTEXT
int32_t device_id_ = -1;
CUgreenCtx green_ctx_ = nullptr;
CUcontext context_ = nullptr;
cudaStream_t parent_stream_ = nullptr;
#endif
};
} // namespace at::cuda
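Editorial note (illustration only, not part of the diff): a minimal usage sketch for GreenContext, assuming a CUDA 12.8+ non-ROCm build with driver API support so that CUDA_HAS_GREEN_CONTEXT is 1:

#include <ATen/cuda/CUDAGreenContext.h>
#include <ATen/ATen.h>
#include <optional>

void run_on_sm_partition() {
  // Carve out a green context limited to roughly 16 SMs on the current device.
  auto gc = at::cuda::GreenContext::create(/*num_sms=*/16, /*device_id=*/std::nullopt);
  gc->setContext();                  // switch to the green context (default stream)
  auto x = at::ones({1024, 1024}, at::TensorOptions().device(at::kCUDA));
  auto y = at::mm(x, x);             // kernels launched here run under the SM-limited context
  gc->popContext();                  // restore the parent context and stream
}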

View File

@ -0,0 +1,270 @@
#include <cstdint>
#include <c10/util/typeid.h>
#include <c10/util/Exception.h>
#include <c10/util/SmallVector.h>
#include <c10/core/Scalar.h>
#include <c10/core/ScalarType.h>
#include <c10/util/Exception.h>
#define TORCH_ASSERT_ONLY_METHOD_OPERATORS
#include <ATen/core/Tensor.h>
#include <ATen/core/NamedTensor.h>
#include <ATen/Dispatch.h>
#include <ATen/ExpandUtils.h>
#include <ATen/OpMathType.h>
#include <ATen/TensorUtils.h>
#include <ATen/cuda/CUDABlas.h>
#include <ATen/cuda/tunable/Tunable.h>
#include <ATen/cuda/tunable/TunableGemm.h>
#include <ATen/native/Resize.h>
#include <c10/util/MaybeOwned.h>
#include <ATen/native/GroupedMMUtils.h>
#include <ATen/native/cuda/RowwiseScaledMM.h>
#include <ATen/native/cuda/ScaledGroupMM.h>
#include <ATen/native/cuda/GroupMM.h>
#include <ATen/ceil_div.h>
#ifdef USE_FBGEMM_GENAI
#include <fbgemm_gpu/torch_ops.h>
#endif
#ifndef AT_PER_OPERATOR_HEADERS
#include <ATen/Functions.h>
#include <ATen/NativeFunctions.h>
#else
#include <ATen/ops/_addmm_activation_native.h>
#include <ATen/ops/_efficientzerotensor.h>
#include <ATen/ops/_scaled_mm_native.h>
#include <ATen/ops/_unsafe_view_native.h>
#include <ATen/ops/abs.h>
#include <ATen/ops/addmm_native.h>
#include <ATen/ops/addmv_native.h>
#include <ATen/ops/baddbmm_native.h>
#include <ATen/ops/bmm_native.h>
#include <ATen/ops/copy_native.h>
#include <ATen/ops/dot_native.h>
#include <ATen/ops/empty.h>
#include <ATen/ops/empty_strided.h>
#include <ATen/ops/gelu.h>
#include <ATen/ops/max.h>
#include <ATen/ops/mm_native.h>
#include <ATen/ops/mul.h>
#include <ATen/ops/relu.h>
#include <ATen/ops/ones.h>
#include <ATen/ops/scalar_tensor_native.h>
#include <ATen/ops/vdot_native.h>
#endif
using at::blas::ScalingType;
using at::blas::SwizzleType;
namespace at::cuda::scaled {
/**
* Both inputs must be fp8,
* Each needs a single scale, {Tensorwise (float)}
*/
bool check_tensorwise_recipe(c10::ScalarType type_a,
std::vector<ScalingType>& recipe_a,
ArrayRef<Tensor>& scales_a,
c10::ScalarType type_b,
std::vector<ScalingType>& recipe_b,
ArrayRef<Tensor>& scales_b) {
// both types must be fp8
if (!isFloat8Type(type_a) || !isFloat8Type(type_b)) {
return false;
}
// 1 scale each, {Tensorwise, float}
if (scales_a.size() != 1 || recipe_a.size() != 1 || scales_b.size() != 1 || recipe_b.size() != 1) {
return false;
}
// Need {Tensorwise, float} for A & B
if (recipe_a[0] != ScalingType::TensorWise) return false;
if (scales_a[0].scalar_type() != ScalarType::Float) return false;
if (recipe_b[0] != ScalingType::TensorWise) return false;
if (scales_b[0].scalar_type() != ScalarType::Float) return false;
return true;
}
/**
* Both inputs must be fp8,
* Each needs scales, {Rowwise (float)}
*/
bool check_rowwise_recipe(c10::ScalarType type_a,
std::vector<ScalingType>& recipe_a,
ArrayRef<Tensor>& scales_a,
c10::ScalarType type_b,
std::vector<ScalingType>& recipe_b,
ArrayRef<Tensor>& scales_b) {
// both types must be fp8
if (!isFloat8Type(type_a) || !isFloat8Type(type_b)) {
return false;
}
// 1 scale each, {Rowwise, float}
if (scales_a.size() != 1 || recipe_a.size() != 1 || scales_b.size() != 1 || recipe_b.size() != 1) {
return false;
}
// Need {RowWise, fp32} for A & B
if (recipe_a[0] != ScalingType::RowWise) return false;
if (scales_a[0].scalar_type() != ScalarType::Float) return false;
if (recipe_b[0] != ScalingType::RowWise) return false;
if (scales_b[0].scalar_type() != ScalarType::Float) return false;
return true;
}
/**
* Two-level scaling, canonical NVFP4
* Both inputs must be fp4
* A, B need 2 scales, {Blockwise_1x16 (e4m3), Tensorwise (fp32)}
*/
bool check_nvfp4_recipe(c10::ScalarType type_a,
std::vector<ScalingType>& recipe_a,
ArrayRef<Tensor>& scales_a,
c10::ScalarType type_b,
std::vector<ScalingType>& recipe_b,
ArrayRef<Tensor>& scales_b) {
// both types must be fp4
if (type_a != ScalarType::Float4_e2m1fn_x2 || type_b != ScalarType::Float4_e2m1fn_x2) {
return false;
}
// 2 scales, 2 recipes for each input
if (scales_a.size() != 2 || recipe_a.size() != 2 || scales_b.size() != 2 || recipe_b.size() != 2) {
return false;
}
// Need {Blockwise_1x16, e4m3 for scale[0], Tensorwise, fp32 for scale[1]}
if (recipe_a[0] != ScalingType::BlockWise1x16 || recipe_a[1] != ScalingType::TensorWise) return false;
if (scales_a[0].scalar_type() != ScalarType::Float8_e4m3fn || scales_a[1].scalar_type() != ScalarType::Float) return false;
if (recipe_b[0] != ScalingType::BlockWise1x16 || recipe_b[1] != ScalingType::TensorWise) return false;
if (scales_b[0].scalar_type() != ScalarType::Float8_e4m3fn || scales_b[1].scalar_type() != ScalarType::Float) return false;
return true;
}
/**
* Single-level scaling, what PyT currently understands
* Both inputs must be fp4
* A, B need 1 scale, {Blockwise_1x16 (e4m3)}
*/
bool check_nvfp4_recipe_single_scale
(c10::ScalarType type_a,
std::vector<ScalingType>& recipe_a,
ArrayRef<Tensor>& scales_a,
c10::ScalarType type_b,
std::vector<ScalingType>& recipe_b,
ArrayRef<Tensor>& scales_b) {
// both types must be fp4
if (type_a != ScalarType::Float4_e2m1fn_x2 || type_b != ScalarType::Float4_e2m1fn_x2) {
return false;
}
// 1 scale, 1 recipe for each input
if (scales_a.size() != 1 || recipe_a.size() != 1 || scales_b.size() != 1 || recipe_b.size() != 1) {
return false;
}
// Need {Blockwise_1x16, e4m3} for A & B
if (recipe_a[0] != ScalingType::BlockWise1x16) return false;
if (scales_a[0].scalar_type() != ScalarType::Float8_e4m3fn) return false;
if (recipe_b[0] != ScalingType::BlockWise1x16) return false;
if (scales_b[0].scalar_type() != ScalarType::Float8_e4m3fn) return false;
return true;
}
/**
* Both inputs must be fp8
* A, B must only have 1 scale each, A: {Blockwise_1x128 (float)}, B: {Blockwise_128x128 (float)}
*/
bool check_deepseek_recipe(ScalingType expected_recipe_a,
ScalingType expected_recipe_b,
c10::ScalarType type_a,
std::vector<ScalingType>& recipe_a,
ArrayRef<Tensor>& scales_a,
c10::ScalarType type_b,
std::vector<ScalingType>& recipe_b,
ArrayRef<Tensor>& scales_b) {
// both types must be fp8
if (type_a != ScalarType::Float8_e4m3fn || type_b != ScalarType::Float8_e4m3fn) {
return false;
}
// 1 scale, 1 recipe for each input
if (scales_a.size() != 1 || recipe_a.size() != 1 || scales_b.size() != 1 || recipe_b.size() != 1) {
return false;
}
// Need {Blockwise_1x128, float} for A, {Blockwise_128x128, float} for B
if (recipe_a[0] != expected_recipe_a) return false;
if (scales_a[0].scalar_type() != ScalarType::Float) return false;
if (recipe_b[0] != expected_recipe_b) return false;
if (scales_b[0].scalar_type() != ScalarType::Float) return false;
return true;
}
/**
* Both inputs must be fp8
* A, B must have 1 scale each, {Blockwise_1x32, e8m0}
*/
bool check_mxfp8_recipe(c10::ScalarType type_a,
std::vector<ScalingType>& recipe_a,
ArrayRef<Tensor>& scales_a,
c10::ScalarType type_b,
std::vector<ScalingType>& recipe_b,
ArrayRef<Tensor>& scales_b) {
// both types must be fp8
if (type_a != ScalarType::Float8_e4m3fn || type_b != ScalarType::Float8_e4m3fn) {
return false;
}
// 1 scale, 1 recipe for each input
if (scales_a.size() != 1 || recipe_a.size() != 1 || scales_b.size() != 1 || recipe_b.size() != 1) {
return false;
}
// Need {Blockwise_1x32, e8m0} for A & B
if (recipe_a[0] != ScalingType::BlockWise1x32) return false;
if (scales_a[0].scalar_type() != ScalarType::Float8_e8m0fnu) return false;
if (recipe_b[0] != ScalingType::BlockWise1x32) return false;
if (scales_b[0].scalar_type() != ScalarType::Float8_e8m0fnu) return false;
return true;
}
/**
* Both inputs must be fp4
* A, B must have 1 scale each, {Blockwise_1x32, e8m0}
*/
bool check_mxfp4_recipe(c10::ScalarType type_a,
std::vector<ScalingType>& recipe_a,
ArrayRef<Tensor>& scales_a,
c10::ScalarType type_b,
std::vector<ScalingType>& recipe_b,
ArrayRef<Tensor>& scales_b) {
// both types must be fp4
if (type_a != ScalarType::Float4_e2m1fn_x2 || type_b != ScalarType::Float4_e2m1fn_x2) {
return false;
}
// 1 scale, 1 recipe for each input
if (scales_a.size() != 1 || recipe_a.size() != 1 || scales_b.size() != 1 || recipe_b.size() != 1) {
return false;
}
// Need {Blockwise_1x32, e8m0} for A & B
if (recipe_a[0] != ScalingType::BlockWise1x32) return false;
if (scales_a[0].scalar_type() != ScalarType::Float8_e8m0fnu) return false;
if (recipe_b[0] != ScalingType::BlockWise1x32) return false;
if (scales_b[0].scalar_type() != ScalarType::Float8_e8m0fnu) return false;
return true;
}
} // namespace at::cuda::scaled
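Editorial note (illustration only, not part of the diff): a sketch of what a caller has to assemble for check_tensorwise_recipe to pass -- two fp8 operand dtypes, one TensorWise recipe entry per operand, and one float scale tensor per operand. The include path of the header that declares these helpers is not shown in this diff, so it is omitted here:

#include <ATen/ATen.h>
#include <vector>
// plus the new at::cuda::scaled declarations added later in this diff

bool is_plain_tensorwise_fp8(const at::Tensor& scale_a, const at::Tensor& scale_b) {
  using at::blas::ScalingType;
  std::vector<ScalingType> recipe_a{ScalingType::TensorWise};
  std::vector<ScalingType> recipe_b{ScalingType::TensorWise};
  std::vector<at::Tensor> sa{scale_a}, sb{scale_b};
  at::ArrayRef<at::Tensor> scales_a(sa), scales_b(sb);
  // Returns true only if both dtypes are fp8 and each side has exactly one fp32 scale.
  return at::cuda::scaled::check_tensorwise_recipe(
      at::ScalarType::Float8_e4m3fn, recipe_a, scales_a,
      at::ScalarType::Float8_e4m3fn, recipe_b, scales_b);
}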

View File

@ -0,0 +1,174 @@
#include <cstdint>
#include <c10/util/typeid.h>
#include <c10/util/Exception.h>
#include <c10/util/SmallVector.h>
#include <c10/core/Scalar.h>
#include <c10/core/ScalarType.h>
#include <c10/util/Exception.h>
#define TORCH_ASSERT_ONLY_METHOD_OPERATORS
#include <ATen/core/Tensor.h>
#include <ATen/core/NamedTensor.h>
#include <ATen/Dispatch.h>
#include <ATen/ExpandUtils.h>
#include <ATen/OpMathType.h>
#include <ATen/TensorUtils.h>
#include <ATen/cuda/CUDABlas.h>
#include <ATen/cuda/tunable/Tunable.h>
#include <ATen/cuda/tunable/TunableGemm.h>
#include <ATen/native/Resize.h>
#include <c10/util/MaybeOwned.h>
#include <ATen/native/GroupedMMUtils.h>
#include <ATen/native/cuda/RowwiseScaledMM.h>
#include <ATen/native/cuda/ScaledGroupMM.h>
#include <ATen/native/cuda/GroupMM.h>
#include <ATen/ceil_div.h>
#ifdef USE_FBGEMM_GENAI
#include <fbgemm_gpu/torch_ops.h>
#endif
#ifndef AT_PER_OPERATOR_HEADERS
#include <ATen/Functions.h>
#include <ATen/NativeFunctions.h>
#else
#include <ATen/ops/_addmm_activation_native.h>
#include <ATen/ops/_efficientzerotensor.h>
#include <ATen/ops/_scaled_mm_native.h>
#include <ATen/ops/_unsafe_view_native.h>
#include <ATen/ops/abs.h>
#include <ATen/ops/addmm_native.h>
#include <ATen/ops/addmv_native.h>
#include <ATen/ops/baddbmm_native.h>
#include <ATen/ops/bmm_native.h>
#include <ATen/ops/copy_native.h>
#include <ATen/ops/dot_native.h>
#include <ATen/ops/empty.h>
#include <ATen/ops/empty_strided.h>
#include <ATen/ops/gelu.h>
#include <ATen/ops/max.h>
#include <ATen/ops/mm_native.h>
#include <ATen/ops/mul.h>
#include <ATen/ops/relu.h>
#include <ATen/ops/ones.h>
#include <ATen/ops/scalar_tensor_native.h>
#include <ATen/ops/vdot_native.h>
#endif
using at::blas::ScalingType;
using at::blas::SwizzleType;
namespace at::cuda::scaled {
static bool _scaled_mm_allowed_device(bool sm90_only=false, bool sm100_only=false) {
#ifdef USE_ROCM
static const std::vector<std::string> archs = {
"gfx942",
#if ROCM_VERSION >= 60300
"gfx1200", "gfx1201",
#endif
#if ROCM_VERSION >= 60500
"gfx950"
#endif
};
return at::detail::getCUDAHooks().isGPUArch(archs);
#else
auto dprops = at::cuda::getCurrentDeviceProperties();
if (sm90_only || sm100_only) {
return (sm90_only && dprops->major == 9) || (sm100_only && dprops->major == 10);
} else {
return dprops->major >= 9 || (dprops->major == 8 && dprops->minor == 9);
}
#endif
}
#ifdef USE_ROCM
static bool _scaled_mm_is_fnuz() {
return at::detail::getCUDAHooks().isGPUArch({"gfx942"});
}
#endif
/**
* Track concrete implementations available
*/
enum class ScaledGemmImplementation {
NONE = 0,
TENSORWISE_TENSORWISE = 1,
ROWWISE_ROWWISE = 2,
BLOCK_128x128_1x128 = 3,
BLOCK_1x128_128x128 = 4,
BLOCK_1x128_1x128 = 5,
MXFP8_MXFP8 = 6,
NVFP4_NVFP4 = 7,
NVFP4_NVFP4_SINGLE_SCALE = 8,
MXFP4_MXFP4 = 9,
};
/**
* Convert passed int (enum) from python back into a
* strictly-typed enum
*/
template <class EnumType, class ArrayType>
std::vector<EnumType> convert_int_to_enum(ArrayType& v) {
std::vector<EnumType> converted;
converted.reserve(v.size());
for (auto vi : v) {
converted.push_back(static_cast<EnumType>(vi));
}
return converted;
}
bool check_tensorwise_recipe(c10::ScalarType,
std::vector<ScalingType>&,
ArrayRef<Tensor>&,
c10::ScalarType,
std::vector<ScalingType>&,
ArrayRef<Tensor>&);
bool check_rowwise_recipe(c10::ScalarType,
std::vector<ScalingType>&,
ArrayRef<Tensor>&,
c10::ScalarType,
std::vector<ScalingType>&,
ArrayRef<Tensor>&);
bool check_nvfp4_recipe(c10::ScalarType,
std::vector<ScalingType>&,
ArrayRef<Tensor>&,
c10::ScalarType,
std::vector<ScalingType>&,
ArrayRef<Tensor>&);
bool check_nvfp4_recipe_single_scale
(c10::ScalarType,
std::vector<ScalingType>&,
ArrayRef<Tensor>&,
c10::ScalarType,
std::vector<ScalingType>&,
ArrayRef<Tensor>&);
bool check_deepseek_recipe(ScalingType,
ScalingType,
c10::ScalarType,
std::vector<ScalingType>&,
ArrayRef<Tensor>&,
c10::ScalarType,
std::vector<ScalingType>&,
ArrayRef<Tensor>&);
bool check_mxfp8_recipe(c10::ScalarType,
std::vector<ScalingType>&,
ArrayRef<Tensor>&,
c10::ScalarType,
std::vector<ScalingType>&,
ArrayRef<Tensor>&);
bool check_mxfp4_recipe(c10::ScalarType,
std::vector<ScalingType>&,
ArrayRef<Tensor>&,
c10::ScalarType,
std::vector<ScalingType>&,
ArrayRef<Tensor>&);
} // namespace at::cuda::scaled

View File

@ -183,11 +183,6 @@ struct CUDACachingHostAllocatorImpl
return true;
}
bool pinned_use_background_threads() override {
return c10::cuda::CUDACachingAllocator::CUDAAllocatorConfig::
pinned_use_background_threads();
}
EventPool::Event create_event_internal(DeviceIndex idx) {
// Leak the event pool to avoid shutdown issue.
static auto* event_pool = new EventPool();

View File

@ -70,11 +70,7 @@
#define ATEN_CUB_MAXIMUM() NO_ROCM(at_cuda_detail)ROCM_HIPCUB(::cub)::Max()
#endif
#if (!defined(USE_ROCM) && !CUB_SUPPORTS_NV_BFLOAT16()) || defined(USE_ROCM)
#if !defined(USE_ROCM)
namespace at_cuda_detail {
#endif
#if defined(USE_ROCM)
// backport https://github.com/NVIDIA/cub/pull/306 for c10::BFloat16
@ -96,10 +92,6 @@ template <>
struct ROCM_HIPCUB(cub)::NumericTraits<c10::BFloat16>:
ROCM_HIPCUB(cub)::BaseTraits<ROCM_HIPCUB(cub)::FLOATING_POINT, true, false, unsigned short, c10::BFloat16> {};
#if !defined(USE_ROCM)
} // namespace at_cuda_detail
#endif
#endif
#if !defined(USE_ROCM)
@ -121,7 +113,7 @@ struct cuda_type<c10::Half> {
using type = __half;
};
#if !defined(USE_ROCM) && CUB_SUPPORTS_NV_BFLOAT16()
#if !defined(USE_ROCM)
template<>
struct cuda_type<c10::BFloat16> {
@ -177,7 +169,6 @@ inline void segmented_sort_pairs(
}
}
#if CUB_SUPPORTS_UNIQUE_BY_KEY()
template <typename KeysInputIteratorT, typename ValuesInputIteratorT, typename ValuesOutputIteratorT, typename NumSelectedIteratorT>
inline void unique_by_key(
KeysInputIteratorT keys_in, ValuesInputIteratorT values_in,
@ -193,7 +184,6 @@ inline void unique_by_key(
CUB_WRAPPER(NO_ROCM(at_cuda_detail)::cub::DeviceSelect::UniqueByKey,
keys_in, values_in, keys_out_, values_out, num_selected, num_input_items, c10::cuda::getCurrentCUDAStream());
}
#endif
namespace impl {
@ -205,36 +195,6 @@ __global__ void transform_vals(InputIteratorT1 a, InputIteratorT2 b, OutputItera
*out = scan_op(static_cast<acc_t>(*a), static_cast<acc_t>(*b));
}
#if !CUB_SUPPORTS_FUTURE_VALUE()
template<typename ValueT, typename InputIteratorT>
struct chained_iterator {
using iterator_category = std::random_access_iterator_tag;
using difference_type = std::ptrdiff_t;
using value_type = ValueT;
using pointer = ValueT*;
using reference = ValueT&;
InputIteratorT iter;
ValueT *first;
difference_type offset = 0;
__device__ ValueT operator[](difference_type i) {
i += offset;
if (i == 0) {
return *first;
} else {
return ValueT(iter[i - 1]);
}
}
__device__ chained_iterator operator+(difference_type i) {
return chained_iterator{iter, first, i};
}
__device__ ValueT operator*() {
return (*this)[0];
}
};
#endif
// even though cub is supposed to support tensors with int_max elements, in reality it doesn't,
// so split at int_max/2
constexpr int max_cub_size = std::numeric_limits<int>::max() / 2 + 1; // 2**30
@ -279,25 +239,6 @@ inline void inclusive_scan(InputIteratorT input, OutputIteratorT output, ScanOpT
first_elem_ptr,
scan_op);
C10_CUDA_KERNEL_LAUNCH_CHECK();
#if !CUB_SUPPORTS_FUTURE_VALUE()
using ArgIndexInputIterator = NO_ROCM(at_cuda_detail)::cub::ArgIndexInputIterator<InputIteratorT>;
using tuple = typename ArgIndexInputIterator::value_type;
auto input_iter_transform = [=] __device__ (const tuple &x)->input_t {
if (x.key == 0) {
return *first_elem_ptr;
} else {
return x.value;
}
};
auto input_ = ATEN_CUB_TRANSFORM_ITERATOR(input_t, decltype(input_iter_transform), ArgIndexInputIterator)(
ArgIndexInputIterator(input + i), input_iter_transform);
CUB_WRAPPER(NO_ROCM(at_cuda_detail)::cub::DeviceScan::InclusiveScan,
input_,
output + i,
scan_op,
size_cub,
at::cuda::getCurrentCUDAStream());
#else
CUB_WRAPPER(NO_ROCM(at_cuda_detail)::cub::DeviceScan::ExclusiveScan,
input + i + 1,
output + i,
@ -305,7 +246,6 @@ inline void inclusive_scan(InputIteratorT input, OutputIteratorT output, ScanOpT
::at_cuda_detail::cub::FutureValue<input_t>(first_elem_ptr),
size_cub,
at::cuda::getCurrentCUDAStream());
#endif
}
#endif
}
@ -557,16 +497,6 @@ inline void exclusive_scan(InputIteratorT input, OutputIteratorT output, ScanOpT
first_elem_ptr,
scan_op);
C10_CUDA_KERNEL_LAUNCH_CHECK();
#if !CUB_SUPPORTS_FUTURE_VALUE()
auto input_ = impl::chained_iterator<InitValueT, InputIteratorT>{
input + i, first_elem_ptr};
CUB_WRAPPER(NO_ROCM(at_cuda_detail)::cub::DeviceScan::InclusiveScan,
input_,
output + i,
scan_op,
size_cub,
at::cuda::getCurrentCUDAStream());
#else
CUB_WRAPPER(NO_ROCM(at_cuda_detail)::cub::DeviceScan::ExclusiveScan,
input + i,
output + i,
@ -574,12 +504,10 @@ inline void exclusive_scan(InputIteratorT input, OutputIteratorT output, ScanOpT
::at_cuda_detail::cub::FutureValue<InitValueT>(first_elem_ptr),
size_cub,
at::cuda::getCurrentCUDAStream());
#endif
}
#endif
}
#if CUB_SUPPORTS_SCAN_BY_KEY()
template <typename KeysInputIteratorT, typename ValuesInputIteratorT, typename ValuesOutputIteratorT>
inline void inclusive_sum_by_key(KeysInputIteratorT keys, ValuesInputIteratorT input, ValuesOutputIteratorT output, int64_t num_items) {
@ -607,7 +535,6 @@ inline void inclusive_scan_by_key(KeysInputIteratorT keys, ValuesInputIteratorT
#endif
}
#endif
template <typename InputIteratorT, typename OutputIteratorT, typename NumSelectedIteratorT>
void unique(InputIteratorT input, OutputIteratorT output,

View File

@ -10,14 +10,6 @@
#define CUB_VERSION 200001
#endif
// cub sort support for __nv_bfloat16 is added to cub 1.13 in:
// https://github.com/NVIDIA/cub/pull/306
#if CUB_VERSION >= 101300
#define CUB_SUPPORTS_NV_BFLOAT16() true
#else
#define CUB_SUPPORTS_NV_BFLOAT16() false
#endif
// cub support for CUB_WRAPPED_NAMESPACE is added to cub 1.13.1 in:
// https://github.com/NVIDIA/cub/pull/326
// CUB_WRAPPED_NAMESPACE is defined globally in cmake/Dependencies.cmake
@ -28,30 +20,6 @@
#define USE_GLOBAL_CUB_WRAPPED_NAMESPACE() false
#endif
// cub support for UniqueByKey is added to cub 1.16 in:
// https://github.com/NVIDIA/cub/pull/405
#if CUB_VERSION >= 101600
#define CUB_SUPPORTS_UNIQUE_BY_KEY() true
#else
#define CUB_SUPPORTS_UNIQUE_BY_KEY() false
#endif
// cub support for scan by key is added to cub 1.15
// in https://github.com/NVIDIA/cub/pull/376
#if CUB_VERSION >= 101500
#define CUB_SUPPORTS_SCAN_BY_KEY() 1
#else
#define CUB_SUPPORTS_SCAN_BY_KEY() 0
#endif
// cub support for cub::FutureValue is added to cub 1.15 in:
// https://github.com/NVIDIA/cub/pull/305
#if CUB_VERSION >= 101500
#define CUB_SUPPORTS_FUTURE_VALUE() true
#else
#define CUB_SUPPORTS_FUTURE_VALUE() false
#endif
// There were many bc-breaking changes in major version release of CCCL v3.0.0
// Please see https://nvidia.github.io/cccl/cccl/3.0_migration_guide.html
#if CUB_VERSION >= 200800

View File

@ -0,0 +1,54 @@
#include <ATen/Functions.h>
#include <ATen/Tensor.h>
#include <ATen/cuda/Exceptions.h>
#include <mutex>
namespace at {
namespace cuda {
namespace detail {
__device__ __constant__ float cublas_one_device;
__device__ __constant__ float cublas_zero_device;
float *get_cublas_device_one() {
static c10::once_flag init_flag;
c10::call_once(init_flag, []() {
const float one = 1.f;
AT_CUDA_CHECK(cudaMemcpyToSymbol(cublas_one_device, &one, sizeof(float)));
});
float *ptr;
AT_CUDA_CHECK(cudaGetSymbolAddress(reinterpret_cast<void**>(&ptr), cublas_one_device));
return ptr;
}
float *get_cublas_device_zero() {
static c10::once_flag init_flag;
c10::call_once(init_flag, []() {
const float zero = 0.f;
AT_CUDA_CHECK(cudaMemcpyToSymbol(cublas_zero_device, &zero, sizeof(float)));
});
float *ptr;
AT_CUDA_CHECK(cudaGetSymbolAddress(reinterpret_cast<void**>(&ptr), cublas_zero_device));
return ptr;
}
float *get_user_alpha_ptr() {
static float *alpha_ptr;
static c10::once_flag init_flag;
c10::call_once(init_flag, []() {
AT_CUDA_CHECK(cudaMalloc(&alpha_ptr, sizeof(float)));
});
return alpha_ptr;
}
} // namespace detail
} // namespace cuda
} // namespace at
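Editorial note (illustration only, not part of the diff): these helpers back the device-pointer-mode path of scaled_gemm shown earlier -- cuBLASLt reads alpha/beta from GPU memory when the matmul executes, so the buffers must outlive the call. A sketch of the intended use, mirroring the CUDABlas.cpp change above:

#include <ATen/cuda/detail/BLASConstants.h>
#include <ATen/ATen.h>

void stage_device_alpha(const at::Tensor& alpha /* 1-element float CUDA tensor */) {
  // Copy the user's alpha into the statically managed device buffer...
  float* user_alpha_ptr = at::cuda::detail::get_user_alpha_ptr();
  at::Tensor user_alpha = at::from_blob(
      user_alpha_ptr, {1}, at::TensorOptions().device(at::kCUDA).dtype(at::kFloat));
  user_alpha.copy_(alpha);
  // ...and pair it with the constant-memory zero used for beta when
  // CUBLASLT_POINTER_MODE_DEVICE is selected.
  float* beta_ptr = at::cuda::detail::get_cublas_device_zero();
  (void)beta_ptr;
}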

View File

@ -0,0 +1,11 @@
#pragma once
#include <ATen/core/TensorBase.h>
namespace at::cuda::detail {
float *get_cublas_device_one();
float *get_cublas_device_zero();
float *get_user_alpha_ptr();
} // namespace at::cuda::detail

View File

@ -13,6 +13,7 @@
#include <c10/core/ScalarType.h>
#include <ATen/cuda/tunable/TunableOp.h>
#include <ATen/cuda/tunable/Tunable.h>
#include <ATen/cuda/CUDABlas.h>
#include <ATen/cuda/Exceptions.h>
#include <c10/util/StringUtil.h>
@ -150,6 +151,7 @@ inline std::string ScalarTypeToBLASType(c10::ScalarType scalar_type) {
BLASType = "unknown";
}
return BLASType;
}
// Similar to Compute Type in GemmRocblas.h
@ -244,33 +246,25 @@ inline std::string to_string_epilogue(const at::cuda::blas::GEMMAndBiasActivatio
namespace detail {
static bool NumericalCheck(ScalarType dtype, void* c, void* other_c, int64_t size) {
static bool NumericalCheck(ScalarType dtype, void* c, void* other_c, int64_t size, const NumericalCheckConfig& config) {
if (!config.enabled) {
return true; // skip when disabled
}
auto options = at::TensorOptions().dtype(dtype).device(at::kCUDA);
// comparison done as 1D tensor
at::Tensor ref = at::from_blob(c, {size}, options);
at::Tensor oth = at::from_blob(other_c, {size}, options);
at::Tensor ref_float = ref.to(at::kFloat);
at::Tensor oth_float = oth.to(at::kFloat);
std::vector<double> atols{1e-1, 1e-2, 1e-3, 1e-4, 1e-5};
std::vector<double> rtols{1e-1, 1e-2, 1e-3, 1e-4, 1e-5};
double last_succeed_atol = 1;
double last_succeed_rtol = 1;
for (auto& atol : atols) {
for (auto& rtol : rtols) {
if (at::allclose(ref_float, oth_float, rtol, atol)) {
last_succeed_atol = atol;
last_succeed_rtol = rtol;
}
}
}
if (last_succeed_atol == 1) {
return false;
}
else {
TUNABLE_LOG3("├──verify numerics: atol=", last_succeed_atol, ", rtol=", last_succeed_rtol);
}
return true;
const bool ok = at::allclose(ref_float, oth_float, config.rtol, config.atol);
if (ok) {
TUNABLE_LOG3("├──verify numerics: PASSED with atol=", config.atol, ", rtol=", config.rtol);
} else {
TUNABLE_LOG3("├──verify numerics: FAILED with atol=", config.atol, ", rtol=", config.rtol);
}
return ok;
}
}
@ -355,8 +349,10 @@ struct GemmParams : OpParams {
}
TuningStatus NumericalCheck(GemmParams<T> *other) {
auto* ctx = getTuningContext();
auto cfg = ctx->GetNumericalCheckConfig();
auto c_dtype = c10::CppTypeToScalarType<T>::value;
return detail::NumericalCheck(c_dtype, c, other->c, GetSizeC()/sizeof(T)) ? OK : FAIL;
return detail::NumericalCheck(c_dtype, c, other->c, GetSizeC()/sizeof(T), cfg) ? OK : FAIL;
}
char transa{};
@ -449,8 +445,10 @@ struct GemmAndBiasParams : OpParams {
}
TuningStatus NumericalCheck(GemmAndBiasParams<T> *other) {
auto* ctx = getTuningContext();
auto cfg = ctx->GetNumericalCheckConfig();
auto c_dtype = c10::CppTypeToScalarType<T>::value;
return detail::NumericalCheck(c_dtype, c, other->c, GetSizeC()/sizeof(T)) ? OK : FAIL;
return detail::NumericalCheck(c_dtype, c, other->c, GetSizeC()/sizeof(T), cfg) ? OK : FAIL;
}
char transa{};
@ -546,8 +544,10 @@ struct GemmStridedBatchedParams : OpParams {
}
TuningStatus NumericalCheck(GemmStridedBatchedParams<T> *other) {
auto* ctx = getTuningContext();
auto cfg = ctx->GetNumericalCheckConfig();
auto c_dtype = c10::CppTypeToScalarType<C_Dtype>::value;
return detail::NumericalCheck(c_dtype, c, other->c, GetSizeC()/sizeof(T)) ? OK : FAIL;
return detail::NumericalCheck(c_dtype, c, other->c, GetSizeC()/sizeof(T), cfg) ? OK : FAIL;
}
char transa{};
@ -663,7 +663,9 @@ struct ScaledGemmParams : OpParams {
}
TuningStatus NumericalCheck(ScaledGemmParams<T> *other) {
return detail::NumericalCheck(c_dtype, c, other->c, GetSizeC()/sizeof(T)) ? OK : FAIL;
auto* ctx = getTuningContext();
auto cfg = ctx->GetNumericalCheckConfig();
return detail::NumericalCheck(c_dtype, c, other->c, GetSizeC()/sizeof(T), cfg) ? OK : FAIL;
}
char transa{};

View File

@ -145,7 +145,7 @@ programmatically since the settings become fixed. Use the C++ or Python APIs ins
| PYTORCH_TUNABLEOP_VERBOSE | Default is 0. Set to 1 to enable basic logging. 2 for basic tuning status. 3 for full trace. |
| PYTORCH_TUNABLEOP_VERBOSE_FILENAME | Default is "err" for stderr. Set to "out" for stdout or a filename for capturing verbose logging. |
| PYTORCH_TUNABLEOP_FILENAME | Default is 'tunableop_results.csv'. |
| PYTORCH_TUNABLEOP_NUMERICAL_CHECK | Default is 0. Set to 1 to enable. |
| PYTORCH_TUNABLEOP_NUMERICAL_CHECK | Default is off. Set 'atol_rtol' to enable, for example "1e-5_1e-5". |
| PYTORCH_TUNABLEOP_ROCBLAS_ENABLED | Default is 1. Set to 0 to disable rocblas being considered during tuning. |
| PYTORCH_TUNABLEOP_HIPBLASLT_ENABLED | Default is 1. Set to 0 to disable hipblaslt being considered during tuning. |
| PYTORCH_TUNABLEOP_MAX_TUNING_DURATION_MS | Default is 30. Unit is milliseconds. |
@ -173,10 +173,9 @@ All python APIs exist in the `torch.cuda.tunable` module.
| get_max_tuning_iterations() -> int | |
| set_filename(filename: str, insert_device_ordinal: bool = False) -> None | |
| get_filename() -> str | |
| set_numerical_check_tolerances(enable: bool, atol: float, rtol: float) -> None | Enable or disable numerical checking; atol and rtol default to 1e-5. |
| get_results() -> Tuple[str, str, str, float] | |
| get_validators() -> Tuple[str, str] | |
| write_file_on_exit(val: bool) -> None | Default is True. |
| write_file(filename: Optional[str] = None) -> None | If filename not given, it will call get_filename(). |
| read_file(filename: Optional[str] = None) -> None | If filename not given, it will call get_filename(). |
| tune_gemm_in_file(filename: str) -> None | read an untuned file and tune GEMMs in it. |
| mgpu_tune_gemm_in_file(filename_pattern: str, num_gpus: int) -> None: -> None | read one or more untuned files and tune all unique GEMMs on one or more GPUs. |

View File

@ -107,14 +107,30 @@ void TuningResultsManager::AddImpl(const std::string& op_signature,
}
void TuningResultsManager::Add(const std::string& op_signature, const std::string& params_signature, ResultEntry best) {
std::scoped_lock l{lock_};
bool is_new = false;
ResultEntry inserted = ResultEntry::Null();
auto it = results_.find(op_signature);
if (it == results_.end()) {
it = results_.insert({op_signature, {}}).first;
// ---- mutate maps under results lock ----
{
std::scoped_lock l{lock_};
auto& km = results_[op_signature]; // creates if missing
is_new = (km.find(params_signature) == km.end());
AddImpl(op_signature, params_signature, std::move(best), km);
if (is_new) {
inserted = km.at(params_signature); // snapshot for I/O after unlocking
}
}
if (!is_new) return; // only write once per unique (op, params)
TuningContext* ctx = getTuningContext();
if (ctx->IsTuningEnabled() && !ctx->IsRecordUntunedEnabled()) {
InitRealtimeAppend(ctx->GetFilename(), ctx->GetTuningResultsValidator().GetAllValidators());
if (is_new && realtime_out_ && realtime_out_->good()) {
AppendResultLine(op_signature, params_signature, inserted);
}
}
AddImpl(op_signature, params_signature, std::move(best), it->second);
}
void TuningResultsManager::RecordUntuned( std::ofstream& untuned_file, const std::string& op_signature,
@ -150,6 +166,77 @@ void TuningResultsManager::RecordUntuned( std::ofstream& untuned_file, const std
}
}
void TuningResultsManager::InitRealtimeAppend(const std::string& filename, const std::unordered_map<std::string, std::string>& validators) {
std::scoped_lock fl{realtime_file_mutex_};
if (realtime_out_ && realtime_out_->good() && realtime_filename_ == filename) {
return;
}
if (realtime_out_ && realtime_filename_ != filename) {
realtime_out_->flush();
realtime_out_->close();
realtime_out_.reset();
validators_written_ = false;
}
bool file_exists = false;
bool file_empty = true;
{
std::ifstream check_file(filename);
if (check_file.good()) {
file_exists = true;
file_empty = (check_file.peek() == std::ifstream::traits_type::eof());
}
}
realtime_out_ = std::make_unique<std::ofstream>(filename, std::ios::out | std::ios::app);
if (!realtime_out_->good()) {
TORCH_WARN("TunableOp realtime append: failed to open '", filename,"'");
realtime_out_.reset();
return;
}
if(!file_exists || file_empty) {
for(const auto& [key, val] : validators) {
(*realtime_out_) << "Validator," << key << "," << val << std::endl;
realtime_out_->flush();
}
validators_written_ = true;
TUNABLE_LOG2("Wrote validators to realtime output file");
}
realtime_filename_ = filename;
}
void TuningResultsManager::AppendResultLine(const std::string& op_sig, const std::string& param_sig, const ResultEntry& result) {
std::scoped_lock fl{realtime_file_mutex_};
if(!realtime_out_ || !realtime_out_->good()) {
return;
}
(*realtime_out_) << op_sig << "," << param_sig << "," << result << std::endl;
realtime_out_->flush(); //ensure immediate write to disk
TUNABLE_LOG3("Realtime append: ", op_sig, "(", param_sig, ") -> ", result);
}
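Editorial note (not part of the diff): per the write statements above, the realtime-append file is a CSV-style log -- the validators are written once when the file is new or empty, then one line per newly tuned result is appended and flushed immediately. With placeholder field values, the layout is roughly:

Validator,<validator_key>,<validator_value>
<op_signature>,<params_signature>,<result_entry>
<op_signature>,<params_signature>,<result_entry>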
void TuningResultsManager::CloseRealtimeAppend() {
std::scoped_lock fl{realtime_file_mutex_};
if(realtime_out_) {
realtime_out_->flush();
realtime_out_->close();
realtime_out_.reset();
TUNABLE_LOG2("Closed realtime output file");
}
}
void TuningResultsManager::Delete(const std::string& op_signature, const std::string& params_signature) {
std::scoped_lock l{lock_};
@ -396,7 +483,6 @@ TuningContext::TuningContext() :
tuning_enable_{true},
record_untuned_enable_{false},
manager_initialized_{false},
write_file_on_exit_{true},
numerics_check_enable_{false},
max_tuning_duration_ms_{30},
max_tuning_iterations_{100},
@ -417,20 +503,8 @@ TuningContext::~TuningContext() {
// but doesn't do any computation itself.
return;
}
auto filename = GetFilename();
if (IsTunableOpEnabled() && IsTuningEnabled() && !filename.empty() && write_file_on_exit_) {
if (results_count_from_input_file_ < GetTuningResultsManager().GetSize()) {
if (results_count_from_input_file_ > 0) {
TUNABLE_LOG1("additional tuning results available, rewriting file ", filename);
}
else {
TUNABLE_LOG1("writing file ", filename);
}
if (!WriteFile(filename)) {
TUNABLE_LOG1("failed to write file ", filename);
}
}
}
TUNABLE_LOG1("Closing File");
GetTuningResultsManager().CloseRealtimeAppend(); // Since we do instant logging by default now.
if (untuned_file_.good()) {
untuned_file_.close();
@ -511,20 +585,54 @@ std::ofstream& TuningContext::GetUntunedFile(){
return untuned_file_;
}
void TuningContext::WriteFileOnExit(bool value) {
write_file_on_exit_ = value;
}
void TuningContext::EnableNumericsCheck(bool value) {
numerics_check_enable_ = value;
}
bool TuningContext::IsNumericsCheckEnabled() const {
const auto env = c10::utils::get_env("PYTORCH_TUNABLEOP_NUMERICAL_CHECK");
if (env == "1") {
return true;
NumericalCheckConfig TuningContext::GetNumericalCheckConfig() const {
const auto env_opt = c10::utils::get_env("PYTORCH_TUNABLEOP_NUMERICAL_CHECK");
if (!env_opt.has_value()) {
return numerics_cfg_;
}
return numerics_check_enable_;
const std::string& env = env_opt.value();
if (env == "0") {
return NumericalCheckConfig(false, 1e-5, 1e-5);
}
const size_t underscore = env.find('_');
TORCH_CHECK(
underscore != std::string::npos,
"Invalid PYTORCH_TUNABLEOP_NUMERICAL_CHECK format. "
"Expected 'atol_rtol', got: ",
env);
double atol = 0.0;
double rtol = 0.0;
try {
atol = std::stod(env.substr(0, underscore));
rtol = std::stod(env.substr(underscore + 1));
} catch (const std::exception& e) {
TORCH_CHECK(false, "Failed to parse PYTORCH_TUNABLEOP_NUMERICAL_CHECK: ", e.what());
}
TORCH_CHECK( atol > 0.0 && rtol > 0.0, "Tolerance values must be positive. atol=", atol, ", rtol=", rtol);
return NumericalCheckConfig(true, atol, rtol);
}
void TuningContext::SetNumericalCheckConfig(bool enabled, double atol, double rtol) {
TORCH_CHECK(atol > 0.0 && rtol > 0.0, "Numerical check tolerances must be positive");
numerics_cfg_ = {enabled, atol, rtol};
}
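Editorial note (illustration only, not part of the diff): the same tolerances can come from the environment variable or be set in-process; a sketch, assuming the usual at::cuda::tunable namespace for TunableOp:

#include <ATen/cuda/tunable/Tunable.h>
#include <c10/util/Exception.h>

void configure_numerical_check() {
  // Equivalent to exporting PYTORCH_TUNABLEOP_NUMERICAL_CHECK=1e-4_1e-2 before startup;
  // note that, per GetNumericalCheckConfig() above, a set environment variable takes precedence.
  auto* ctx = at::cuda::tunable::getTuningContext();
  ctx->SetNumericalCheckConfig(/*enabled=*/true, /*atol=*/1e-4, /*rtol=*/1e-2);
  auto cfg = ctx->GetNumericalCheckConfig();
  TORCH_CHECK(cfg.enabled, "numerical check should be enabled");
}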
bool TuningContext::IsNumericsCheckEnabled() const {
const auto cfg = GetNumericalCheckConfig();
return cfg.enabled || numerics_check_enable_;
}
void TuningContext::SetMaxTuningDurationMs(int max_duration_ms) {
@ -634,11 +742,6 @@ TuningResultsManager& TuningContext::GetTuningResultsManager() {
auto filename = GetFilename();
if (!filename.empty() && !IsRecordUntunedEnabled()) {
ReadFile(filename);
// attempt immediately to open file for writing to catch errors early
std::ofstream file(filename, std::ios::out | std::ios::app);
if (!file.good()) {
TORCH_WARN("failed to open file '", filename, "' for writing; your tuning results will not be saved");
}
}
});
return manager_;
@ -744,27 +847,6 @@ bool TuningContext::ReadFile(const std::string& filename_) {
return true;
}
bool TuningContext::WriteFile(const std::string& filename_) {
std::string filename = filename_.empty() ? GetFilename() : filename_;
std::ofstream file(filename, std::ios::out | std::ios::trunc);
if (!file.good()) {
TUNABLE_LOG1("error opening tuning results file for writing ", filename);
return false;
}
auto validators = GetTuningResultsValidator().GetAllValidators();
for (const auto& [key, val] : validators) {
file << "Validator," << key << "," << val << std::endl;
}
auto results = GetTuningResultsManager().Dump();
for (const auto& [op_sig, kernelmap] : results) {
for (const auto& [param_sig, result] : kernelmap) {
file << op_sig << "," << param_sig << "," << result << std::endl;
}
}
file.close();
return true;
}
namespace {
struct MaybeDelete {

Some files were not shown because too many files have changed in this diff.