author    | wangjiezhe <wangjiezhe@gmail.com>  | 2024-01-20 14:43:03 +0800
committer | Jason Zaman <perfinion@gentoo.org> | 2024-02-03 11:32:55 -0800
commit    | e4bdcb0793343eeba56db58ddecf73a0eea0e445 (patch)
tree      | a3fe2bcfdc8f9999f2d73bdb10b415dbda2c144f /sci-libs
parent    | sci-visualization/tensorboard: add 2.14.1 (diff)
download  | gentoo-e4bdcb0793343eeba56db58ddecf73a0eea0e445.tar.gz
          | gentoo-e4bdcb0793343eeba56db58ddecf73a0eea0e445.tar.bz2
          | gentoo-e4bdcb0793343eeba56db58ddecf73a0eea0e445.zip
sci-libs/tensorflow: add 2.14.1
Signed-off-by: wangjiezhe <wangjiezhe@gmail.com>
Signed-off-by: Jason Zaman <perfinion@gentoo.org>
Diffstat (limited to 'sci-libs')
15 files changed, 3982 insertions, 0 deletions
diff --git a/sci-libs/tensorflow/Manifest b/sci-libs/tensorflow/Manifest index 6e541231fe2d..8d8e59ab8231 100644 --- a/sci-libs/tensorflow/Manifest +++ b/sci-libs/tensorflow/Manifest @@ -6,6 +6,7 @@ DIST XNNPACK-659147817805d17c7be2d60bd7bbca7e780f9c82.zip 20341886 BLAKE2B 9aa37 DIST XNNPACK-b9d4073a6913891ce9cbd8965c8d506075d2a45a.zip 24168206 BLAKE2B ba6d612cb09823a655f065a76bd9b956a37664eade90aede2d30d9892f6bcfa2c6134f5332eb31247bea6b46e51f47250ae09a6810bde181c72f715550811f49 SHA512 29c844e758ec2f2913dc477866b016afc04679f16da5774069dabbc9373ed210d510c4b1205eb681de20669e49f34098f490340f5524139b079461589f41e7b0 DIST XNNPACK-e8f74a9763aa36559980a0c2f37f587794995622.zip 18756888 BLAKE2B 0a1787166e8bbfda4aa6010075d92573112a21f3f9d3b1c13bc931fae6fa4cafb71685e4c57e86d7a662912bb6431c2d39a24378bf82361b50e5855d1b62f524 SHA512 a6802f0995742af0ca82de010cbd42da230b36cc884612d4ba2de20ba0ca56da6a11209bfb01ee1a5ddc31dc891a69438fa4836ec9d62d56e32c6aa144c6e7aa DIST apple_support.1.1.0.tar.gz 27105 BLAKE2B 6982ed0188760caeb6951dd28d211449d37a3192fa75e22f5ea86b599a5a92bf8efcfe5a549146533b725aa0fd41584c4d12db3fab41ffbcbca60f657e9590f5 SHA512 db291209ab9a54238b244e02abbca749a695ca3b9b9dc2e207227d3ea32f13144f3236fa921df4c6ba954850635db56584582d8916bdf4c90a2adc55dc90cd3a +DIST apple_support.1.6.0.tar.gz 66375 BLAKE2B 7106e02676861b6ae4b0b42a12fb1fcde0470a99b49088beceabca87743430d6f691688aac5d4cf27e4c4e941781ee9d899fc7c3219095c00bbfe5b6eddafeb5 SHA512 e1d7a119d685fcfd9af1b9b48bb5685743af2d66e86d109575853172c1d9d7c1ce1beaa3fe65d21b55943d243506cdccadc724c603adc5566293b0c0846f874d DIST bazel-skylib-1.3.0.tar.gz 36103 BLAKE2B a58142b9d2a5da9f137705105aa735c8489519989ca7e633968114309f23074a56cd03b5fed70e284da63751d666904c2573940ad9a0feb2be689d695f0f07ae SHA512 ab3a0b465ebbfe07c139b92f1e8b2c0bcede66d6366d184891e3c0ccd6619164bc299777e7d7236cb463834b98426f6fb6890409e0ce94b75446dbd85854944f DIST bazel-toolchains-8c717f8258cd5f6c7a45b97d974292755852b658.tar.gz 80728 BLAKE2B 340a295d8998d01eba6bdd3a97efae869c5dde93dee9bd9266af8ad92a00a5c84cafbc6dd1f5d20f78dfdaa59f7585cefc7af4b87df712489db7f76bfa1d5210 SHA512 cf391a756d2520c65423f991bd0afdf3aed1785b91481c55c5d3182e54b137fc7922fd179e758af2868e11f9f10ce9903409015a2fb0f18e67d14a3b073c6d72 DIST bazelbuild-platforms-0.0.5.tar.gz 5399 BLAKE2B d53aa2dbbd526d15aef24914053a5fa7c7402e1201d94a603c8148281a933292e4815f01aae7f7f166d5325f40b14d2810d6d8e03e0780329c25299c3d8ebffe SHA512 332d5954c349a7d0b801d6338bc42193a730e8ba6c75866ccef17c5053f8f29e1d185cd33a73fe5758e96e57de0c2631974f45d18bdd58c825f9a5fc231ad8b6 @@ -22,6 +23,7 @@ DIST bazelbuild-rules_pkg-0.7.0.tar.gz 76580 BLAKE2B 77574785070b45609d12aa2c2dd DIST bazelbuild-rules_pkg-0.7.1.tar.gz 77334 BLAKE2B fef99181792dac840724d0cfe4f1d71ae77e16e9da0b2f938752e6971b04264bfb7d731998998b5637da774b5e67adb68cc7eb3c4f38a3933ef62f949d56553d SHA512 5b47922e9b60bf82ded612bf023d66d2c6786cc81abe6bc1653aa93400e3497acc2d92d5ff90f9f4ff757143ea0d66c1f8c8eea4059142889f9eb0d9073d9a80 DIST bazelbuild-rules_proto-11bf7c25e666dd7ddacbcd4d4c4a9de7a25175f8.tar.gz 14304 BLAKE2B cdd23ef47d247f6d1b9fbaa49edbda7e1cd55ad2e3616f43ff8d21fa42888c1f36172683e47beb3f3678a2b252d9b6c82fd692711e3133862eade8b64da06ea1 SHA512 024021816b4999b62db41709e0e9354ffdc88beb61a081b061d934241f06b1aa5be3b74324cbec94d1871e60eb65209b2e6b4bb8ca4a101218eaf6196ec6a974 DIST bazelbuild-rules_python-0.0.1.tar.gz 2302092 BLAKE2B 1db52eebf2461d779f764f2afdd070d1d0dd65eb2b83ccd98c2831da1784614ca281b114064729a9f257c64eceb62975aac8362d231c84f32abdf19aee7a1852 SHA512 
40fa069a4482e2f83e29dc8e109652d14d187b2ec8efdcd36e98d117de93d66a938ed74999b42a2293fcb6eccc0a111cbbcf65c5c155579214bb1b96644280a5 +DIST bazelbuild-rules_python-0.1.0.tar.gz 2490176 BLAKE2B dfb4df19ba787c3cb2c2b0ab7115b9678b64ba03b61b60af0253031333aef2ac37942e425ff724e3e759e5198e8ff45b55866a20f7b497f5735adb9f8deb1e72 SHA512 b83b35f5b200f115d9d5e89b2c81745dd834155f52be0ad2972007d4654ae9438f24c7bea3c9122e6056924a69b348ec3c53d649e092dbe5ae8af3b2119bbc5e DIST bazelbuild-rules_swift.1.0.0.tar.gz 199181 BLAKE2B 8261cf061ab630cff5bd0bf55c0b62252d3c7cc34a368eef80c0d1e70534dc43b5596077754306e87ba3e5bbc4b77710ba4934ff748079b8e03e72143e15deab SHA512 9e4acdd0a168462b5b480aad72cda9b3b16aaaf86fdf367c4de80dfcc093cb80c74f2f2219190730be10471d07c94c4f9bf8756a938fb9aaee9d1a4d983c4761 DIST benchmark-f7547e29ccaed7b64ef4f7495ecfff1c9f6f3d03.tar.gz 204856 BLAKE2B a1601a38e71e08490f42e85e87675b2451082c531e2712f7e78ac0267c7fab0b6f1ac96fde34933d82380c61097a4467b277790657695fa51c97ac9504435959 SHA512 e4323f2e7b05566e7b634cc1ec18ae64c7cee1bf4ecdb2a3da97dec8393b1ef26580975e7823d4ee9d51d151db6456bc685717836eb3209574ada22a07451063 DIST cub-1.9.9.zip 619595 BLAKE2B 265b797a906b03da886de88863236c9ab90daa31498ddf848fcaf5e5ee1342614ad9a41618120ca09cc4c0da3e96eeec5e20ca9d7ba3f9860c507f06d15e59e1 SHA512 8c9c0a3f66f8d518ec07f857b5625e006d52f28bade1c1478a0f37420e2f7586dc3ff029d551748a1802bb5544b16fde5388e8d5a45d61eec595201b9db7a30d @@ -29,6 +31,7 @@ DIST cudnn-frontend-v0.7.1.zip 20112411 BLAKE2B 6f836f6b484e708d43833aef3ae52b93 DIST cudnn-frontend-v0.7.3.zip 20124177 BLAKE2B 9c32d99d69d4c7bc96fd6189aa1c2cdfac6fa31dfe84beebaee0e791e7a27768864067159da4473f737612973388daf39c7770ad9c1270bed840221bb603fc4d SHA512 68f5dba9873b317d8239187b57e7b4306e9078e52ef0992e6f23982aa374eff6c2ef2232b6cfff8012f50d9105d6f61c84f7f7c9ab4139d4db451599f896e0b4 DIST cudnn-frontend-v0.9.zip 20077185 BLAKE2B fcd9425be4c2ecc39db0fd92be355a7767b3d00cea990ff4b63ade3dff957f97a6e5fdb1e6f287f6473b2212a66e160940328062a70485c38d5619cf3cc2eb54 SHA512 f38fc912303f4f61ae76d3159ac51b105aba0f728e895b850a164480a729ec144bd8f99764db3e2203210dc114662aba4b4ffe0435d027c0cf620cb42a50df64 DIST dlpack-9351cf542ab478499294864ff3acfdab5c8c5f3d.tar.gz 76170 BLAKE2B c28873deab233d73996137c254acecc4adb0a750cee675cfd0777ccdfa91ea704e338e7166705d47e775c45b46b152834268d89c0443a08c57b4b830bd07ac71 SHA512 e6a4fe9356b8f75f96e7f9960df40e227f8e5242e609f8cc8bf28e8161bd4f58e8c6de374d9cf216edf7e0e09ca502bc158d41c3058bc6e6e7b2bbfb9c5483ff +DIST eigen-0b51f763cbbd0ed08168f88972724329f0375498.tar.gz 2848019 BLAKE2B 005e353f101ee9db0a794843b03169e35d0b74867b7c5638036b973ec2424539646f61e063c892de577e04c78e647c8cb11ede67134b7a8b0471286be7429dfc SHA512 9885feb9ae493bb8860799c69d795df199f3ab84afd2bed5b1e71fff221ccd22023b512b4ab763e1b0c684a93d20d5d202088246fc5ffe211f0995e3839ece55 DIST eigen-3460f3558e7b469efb8a225894e21929c8c77629.tar.gz 2833459 BLAKE2B f624102a174d80860314f0e895f641fb86369a2df88ba2e2589da86e5ff0802b36c64be44211f6013997e0251d74bb28051e3e7edcc6cc43ab88448524db3152 SHA512 c015dae59e0df1f25b6caef5f3c724cfd91cd440df6c3eba1ee7c129b20caf9ec4010209cc5edb82ed6534503a697ba6ee49f64be0359de332ed023cdede05cf DIST eigen-3bb6a48d8c171cf20b5f8e48bfb4e424fbd4f79e.tar.gz 2810518 BLAKE2B 97c9221024f765e6899c676602ee2c850fae661dad613957cead4bce29fce8d9cbb1ac20b812b71c699feea75768be2da945fc39e9b9e9cd2e5b3c6bcf034c60 SHA512 de2c35d3ab859021dac9de35b83cb94e824814e436cd40e45ca2f5f7f9fefadac2b4222f05de9eb1b03d04a29c751f9da3a2b804e6c0fc97b4a5508f25b1e7d4 DIST 
eigen-b0f877f8e01e90a5b0f3a79d46ea234899f8b499.tar.gz 2833536 BLAKE2B 04bb103b64fa8c81ed337c67f5484fb15097f03905a504b19ebeaad4b33ab75baf1e3a2e060c91f1974272f55998555cd16c3da9d8a54a725aef39da7d39dae0 SHA512 1b239db63199aa6f3f9c35198294aff5b89c817befe6f394d69d2f4d6c6e3d946fda32119592da0d7894ea8b4fff12a1c1b8c5eda2e52f7365dc5aedda11f90f @@ -38,20 +41,25 @@ DIST googleapis-6b3fdcea8bc5398be4e7e9930c693f0ea09316a0.tar.gz 6091152 BLAKE2B DIST highwayhash-c13d28517a4db259d738ea4886b1f00352a3cc33.tar.gz 160745 BLAKE2B f2ffe8f13b1a8346b5b29e719c0a57f34e1fa6320d0f2d0575e4e97e813ed1a8f55acfb44b57aba70ea7d372ade18aee5ef82a881ecf457f580ffc92d6528c7b SHA512 4d6a7e8e321df82c009a0849b8da097a551f6c35a49fef65b89e731075a6af624918c0e55be9fd3a5bf07c519ab09bdefed57e0e39d4df0e79e189c468939de7 DIST kissfft-131.1.0.tar.gz 52383 BLAKE2B 74e6d2e7d132a311b31c28a468e13d9772a53f0ea0abed0e0f49d8db9c183fb0646f58fd38df3e797b8577285899daf6b80446b149ce2582bb828410656d96df SHA512 bd715868ce0e93a291a0592fb1f8b960e832fc64efe863755e52b67d5addff9bcb444a1bf2570d1914c52b41dad1023d0d86400f5ea30c9fb84cd6b4f7210708 DIST llvm-project-10939d1d580b9d3c9c2f3539c6bdb39f408179c0.tar.gz 179559452 BLAKE2B ccdf998502aea7e87ba128560f5458b2a959d32a13f56dc795b8a0ed794c0348ca035ca601a34c0c288990f358dc5c3c01712b7458ebd15c48b1c41b4413fcd2 SHA512 36997be5da7caeaf949ae093b3ec1d81dda668087cc94de1fee9f5262a019f40fca61e652787e3f9498cd8b021b8ffc8002daef189ae0e15fda281ef6d56ecd7 +DIST llvm-project-668e33c6401abe7844691fb7d47a3cf2d2012dbc.tar.gz 195368050 BLAKE2B 4fe9c27ce35e579e36f1bdca7281206eeb530eeab00089b4c71834f7b47c96e8f951f3ff8477e2946a30c21cd4dfee5a9b485282e5f4f449a70ad7652f378a45 SHA512 6ef3796c8931503f8e7000087262feb011995c1df79f7a7776ab508e9fb37a7bf5bad471d3317d550142b68818a1b3a26d39e23214a3fff852de0c763cf05b2f DIST llvm-project-d8415b02a519f222ecf71b069c96cc85ac635de3.tar.gz 169045379 BLAKE2B fe25f9c889c5159fbc1d251640f65b3097b4260ec7b27d5133843502ee397995c1075fb10f1a6519c6f693757ab8fe0fe2b82bb96678ef4ec4086c09ce3c90c3 SHA512 546edd97778b4298d7bb645620010e00569813fab07b925a909db4cdd8feb9adc4898e488f7bb31e70587d7b4134820a3f49a98d4f87bcf1dcad9adf5eed7e4c DIST llvm-project-dc275fd03254d67d29cc70a5a0569acf24d2280d.tar.gz 182909064 BLAKE2B ba2a2db104849d1b09115cc2decdbb2e5dc84c58b61074500ff728e29c2f380a0818a4e8df22f4a1552c04e243dd114862492d7f8df06132348034c500200e14 SHA512 4f51271b765a666b023547382f3f983453afbfc69b793336e381e335d6103978292e781f86fffe16cba8b6d6ea309b64e6d899570060c275779aa0a2b90948c7 DIST llvmorg-10.0.1-openmp-10.0.1.src.tar.xz 955492 BLAKE2B 4197ecfb2e3498a95a2ba5923f2b4bdafbab645ddf2d3f1875d39752d3ab7304fb35bce918d2dc6e40e5ea809545ae0907d6bc0b94a3d68b8d96da48f5a62adc SHA512 5b6f6487feaabd2a18ef2bbb1a5f86bb567e264f9fdf56805cfdd3f65f36f21e7c1392ba5170fafb52a395fc1606b277233beba3df31dc2ab825ef8924e7a05a +DIST ml_dtypes-5b9fc9ad978757654843f4a8d899715dbea30e88.tar.gz 66243 BLAKE2B 541ce280fff68b51eb9e7f9eaff7c03d0c095ed5b219d3ca3d61c31650a21a63bae6fd6a8efddaced6de4d2601d5a7c6924d300d120c30907ea5e239c00ec70d SHA512 78f7e25e37ea30b0dc0cfd0dec4e03d4e208cbf223c239fa386eec9f9912f1adea290eefcca7b15b73f2329c457b34fef4374fb1ad0f3cedb02b310e0eb9fdb6 DIST nvidia-nccl-v2.13.4-1.tar.gz 287269 BLAKE2B 8719e26978225a9697101fb7998388c3179bb0af5c396602689242e5529492ad7a81a142e3a8492c9fa4c92adc251c58e67f97fee64a4fd1a046134ac7d737d7 SHA512 5f7077f69a0c1624a1c8ca3d2f503d8269751e26cb6ee63e1a165fb86877b62073ec4e182d939b9aacce4ee8bb8295a39d1b6d65ef3dc0fce795735341a13fc6 DIST nvidia-nccl-v2.16.2-1.tar.gz 326883 BLAKE2B 
86db7adc67ba311b72e7e013dbc2a04918c0746c1fb62079ccd3300691479e1f6e35e379d6ee4320e343666b68372c56607ae521f5ff2d7e59d5f4dc3b894097 SHA512 e6572c2e7adc03053048c0b1e5290ffaf6f294239d78038887582c847aa549e5e95c7970b943f1d0b8964c32b4cdee3785bf40886f274907b613f320e9de10d0 DIST nvidia-nccl-v2.16.5-1.tar.gz 327261 BLAKE2B abeeb6a2d4b58647ecb17694d92f79e650d2f2ffbccf26682ab202e17a1b7d3c356fce26d9f6edffee0756d71887bba8a9d5c254ad433d3b4ae8babfe3294534 SHA512 fc13e83e2339921b732c02250e95614b21202c52461aa262489714af6d92117aa5c0647bb0dcc3394cd357d4b7e8a76fe4c3a3567ba4512c359f19e2ff41de4d DIST oneDNN-v2.7.1.tar.gz 6405831 BLAKE2B b43253f7bc1be0bca51746f06d825191ae544376b259662cbf8f567d8f39a6befde3c88a14744e053b851d2f89fb2600b999abef1acb585bc116d6fa0c95fe3f SHA512 062e97ac613d265627ec1f010aa1d101bf71c449c813187c26244c66c9e6b9b582a0a0a823a9391fa828f396051318fada8263ff64c4f4b4bb6ca1d7a08ea6e1 DIST oneDNN-v2.7.3.tar.gz 6410473 BLAKE2B c6730100e0438d456eb4986f416ae2bd1f173a80c52d5090523af06790afae8ee17cc58ffa8ed7215cd0eff99191a925d8cdce9986c72ccb8ebffacedc434b18 SHA512 ad9450f8b701288fa1721f64d0cb33fc344e9fc4f84e517b3d52377189ffcd0c5b56156ef4a33ca3ffe2da886abcc7ac5b2a3407cc155bd4be2223751b84f7c9 DIST oneDNN-v3.1.tar.gz 7556565 BLAKE2B db6865410e902778c0153c50cc1f16c12e358b360d7e865207a86489d42727e66945d422b8bfa52b04b5f2b34daf585f1472a031cd8810a36c6724a2779120c1 SHA512 2053157a3885618364a9da5ec738c9cc2dde15db1ce9737578565d25c4a15a65944db3bbd17780de2032cfa2329bea4cb0af24ee428c9c246bdfa07a5bdde30b +DIST oneDNN-v3.2.1.tar.gz 9186820 BLAKE2B f85cb1b410c3f57e098106ca13939c8c93c396e012b15a63c4f728ba75138a6f371db5fd182a54711479beca8f215578ea52d9c3d07be36647f6befb6c16746a SHA512 115819dc47fce5ef8fc7403f88e141743b360bc33243c90740d1b3871849ac379930d23e3e1d62d0abaaa3af5d2cdbd4218b80aa1be1edb09d0d949f7532a559 DIST openxla-stablehlo-43d81c6883ade82052920bd367c61f9e52f09954.zip 27954369 BLAKE2B 30dddfcf2102e344d82171d8fcb2df68a3c2dedfc349a3f248c060e591535127d7716e1bf10c5eef20369eb0d81a6cc0eb5350a6979adb8a164b7bda62d6c745 SHA512 2432e4256bfd2d92ba717895967d87e05bb0201a5086314b1de5fe9078bfea37c14245b88b720ec82f2906751ab344da0dab9f714a6fffe79a0423cf7659e5ac DIST openxla-stablehlo-51f005f0a8ff6e28f535adfec4de936cb4097aa4.zip 6902721 BLAKE2B ef9766377a38f816f5a6dc60f34d5300b2775bc282084e9f34c7a5ccc6104a0154d44f2c57aba081889de50fc141a6059255fca3f681322343e316289d6540d7 SHA512 ffe46e21be6f617b6ecbc7ef35e83d441256e429150af60451cf04c02085fb1a0b60a9687d8d60d6f1f9321e6f6a92f24749a3c1cf1ee694a8ffc0fcd13f64f4 +DIST openxla-stablehlo-9ae6c373a6e2941ff84a8831bb3724728cb2b49a.zip 17784280 BLAKE2B 99bd410d36d78c4dbefef46d7df137b0bf583cc9cb2d34832f3d95360237647a05511c904ce23030d23ce6d95c34af11c29085db9f775aa96a702c28cec1891d SHA512 b098c2ec986ffae14f916a62095561942a809f80d2159005fbaa08691532ae2a3383b11d8672907b116fcedcf21c88ad86f3f4911b666a66543eab16fae06d86 DIST openxla-stablehlo-fdd47908468488cbbb386bb7fc723dc19321cb83.zip 301572 BLAKE2B c8242b3d9612fbdfa1c34ae5cb610aadd68464498e6cc84d48bcc38abb9e8636fa31b9a03b5a604a29cafe12a47e5b7be90c48d52fb6587bcd376d6307775197 SHA512 61b89d0dafe450ae95d770878385b5ed3cbb0109b79cf5e01304119f2f52255ccc83cedc542cfa5c342692945833b86606468224e67c6ff9dd6f7216b86adc7a DIST openxla-triton-1627e0c27869b4098e5fa720717645c1baaf5972.tar.gz 985789 BLAKE2B ef3aa4d8033757d37a9ecde3a687a261c5ecde1200b30db1ae24cc5725c334f8662e741041be1e854ddb2cd5d9cb2b9d5f254d9219e8035c5f08df92b3ee9fab SHA512 67ae9075219e142ec3f3e5c3f05cff5fb15f3ef9605bd1c0dbb1e26940bf1e428a9c17b0d5f85c069844f6ea94b8a3ce94277bd4cd344238fbbdc3f56f91e08f 
DIST openxla-triton-2c3853269281da6742cf469a5ca5772947d271ce.tar.gz 459751 BLAKE2B 8b1b314fd1b6d8822a84cb9cacfd70e2c59784a76f879d75c910f376d38fbdccbc132ebab7f29c5bddde04afd7821c0322311d97c55fcfcc87580b82039a7efa SHA512 cedee3b982b93ae237a1e035ef2a9447aabc94ea4add63959b927670006b5cf546f064d5741ee9e731de31c343ed5869abe9c479d07360b07ef2b26f93081a6a +DIST openxla-triton-cl546794996.tar.gz 948559 BLAKE2B 9c2ed46364b4986c39466803f14ec5618cab0cbc504f53909f74eabf7f6d5e5f4f6fcf1d19965f48b38f18dc99f26fc02ecc7275f05194b228e281988bbb4cea SHA512 680774ffb6cf291bb0f7bd851d4cb66d4e40d70ce2761441ac17595fb98fee6cb013fc5d4f8ca33d79f7b09f2e2924e50c027a09e7250d72767c59a119e56143 DIST protobuf-3.21.9.zip 7156366 BLAKE2B 464ec84fd380d2d472cde5b7dd978c31ac8dc455934b7b1f7afe7dd836579ff74c1196d56dea2944fb41e5ef2f2e05683b889b54e4a3a11bb7cf87a0cd57f691 SHA512 311e0bcd3da54b8fb2df4de03a9b465cd56451018438e4d72054d78e70c37c70ee3c8a0378a177acb737ec863bdf9250d10bcbcdc0f97707c7edf97b9b37683b DIST pthreadpool-b8374f80e42010941bda6c85b0e3f1a1bd77a1e0.zip 61524 BLAKE2B 924419730bc6b94ec98a13db94f177b3b70d6c763158f43fb6f9e45605e73cfce238e6c996e2bf629dbb2a5af93ae99849ddc91174fc4664d702667f7423892d SHA512 d25262b47e39058d5aa8b8197c4984a5b941587a19079a2210f738c3af34ab8e8477731c88ca80c3f812a6a04635f80300124d93cc1e099352ef2aca04bdc3ae DIST pybind11_abseil-2c4932ed6f6204f1656e245838f4f5eae69d2e29.tar.gz 28202 BLAKE2B 58a13d005367d938e9fc17da6435a8c2706722d0770c173cbfc433b7ea4de7e7d1b97653c5859cc8c436cccda3b8d21df906249a3a60ee4bba7cc6601abfaa59 SHA512 91befca03fa1b4f12446b84d5fe3514df6c9e352a19042f4f8856f6df306229f23b6ca2976a17ab22c8dd5afa223a44013e54a4348298c305a7688646129f0a4 @@ -60,8 +68,12 @@ DIST pybind11_protobuf-80f3440cd8fee124e077e2e47a8a17b78b451363.zip 76830 BLAKE2 DIST pytorch-cpuinfo-3dc310302210c1891ffcfb12ae67b11a3ad3a150.tar.gz 3515639 BLAKE2B d4adc49ea1bcbfd7a7efb13cdfea6a1d9cf717b06209c03342f92a3d624de65bcdf2ce7aa7fa8bd5f95ad423447ee833fdea718e16f98037344df75de8bde943 SHA512 f4c0ce922cee0df62c15a335889bb59b5f70ad71448b42b9c1bfa9b5e77c4c4d5f1613f607f32fa9d6817d0d5f49c554e1378079a1cd66a9cd2492796d48c3c2 DIST pytorch-cpuinfo-3dc310302210c1891ffcfb12ae67b11a3ad3a150.zip 3812878 BLAKE2B 30048677534192f1e288c69be5a0373844206cc4e209d48b92f5bf38da37003bdd5125b6588ec0f34acd855acd9cd16193725976ede644d3140fbbcf03d2d364 SHA512 963fa6c6948102d15cae1db90645e9cf85d1efc0fd541f75dfff7d6efe62fdd196085910cdb366be56b7e71f36df98edd211fc875aff6eb1e9962e0d62f43667 DIST pytorch-cpuinfo-5e63739504f0f8e18e941bd63b2d6d42536c7d90.tar.gz 3512335 BLAKE2B 71c14866fde3846b5f375b39fe2f081a6c219b36fc0721640971f99c53ca77c8e7f8df83992d777af32a28c645d64f27dca838bd8e60835c5497e131467b22d0 SHA512 6a61f4574661a55771c2ec31bb0919a51d0bd8c770477b254a5c14dc5323716af275c7fe3abc5aa96720d7cc929559ca66f614265d3940e076b8db2fa15c8e36 +DIST pytorch-cpuinfo-87d8234510367db49a65535021af5e1838a65ac2.tar.gz 3516012 BLAKE2B 78845105e55397d3e948382bac22fff6a639c2d27c096176f29b9b29c6e1e2f78a8ffb44eddf5e20b4d08e2f5dbd7be949a6b55ffe0ca754a00794db8de540a3 SHA512 53b687196b8e46bb99469bbf37f8141c3ee89be78bab67accc63af849207a0234447304b7fa63fb44635add0ddab585df337130acb85fd7b026c0990e12a5840 +DIST pytorch-cpuinfo-87d8234510367db49a65535021af5e1838a65ac2.zip 3813501 BLAKE2B 7b4c54fd6a21b9845dce1f1834eb07613b165ca3fd8ac132bfb3a6964354af9910664f77601f7b839a8770036a1b2a2b21befe3a51d2e6c1e0c6400abbcc952a SHA512 1f697dd26b01bda1e21bebb948fdc2c224455910f55fba7327533b131c016f7cb51eb00804d6d765b37b4614c9093243898363482b3e37e427f83941b0c88f48 +DIST 
re2-03da4fc0857c285e3a26782f6bc8931c4c950df4.tar.gz 396457 BLAKE2B 33b90f851c4a4ba634bcb907e8f32d21f6792e0fb680d38034d02f469e499380b0b458ad75fa42f1ad931cda314f7fb0ba0798ba085e176e5f2e38a67c8e14d7 SHA512 d573150b7a6d8fa3e970f0e245501d7653944fd406f2dc4016c7126c9602be988ba18d0c8b0296684dd30f4a3b9207284db5df4ef5f05e305160b845209f23d0 DIST re2-a276a8c738735a0fe45a6ee590fe2df69bcf4502.tar.gz 306766 BLAKE2B 54162a22afe10b392e30864729b3b210194c0dbf7926cc3536dfe3afd43e0b8abf3d01b08e0feb71a8ade19cd497aea9e9b7b34eacb85e10cc7e1c5fd62a407d SHA512 9557830cea13485bd332ccdcdd3735ea63e6bb41f4cf943ecc24a1d79b75a58c5894cfe70c4a35a07a1b2b1f3d2ffa76fbeff168fda17c72b3436cb6213121dd DIST rules_apple.1.0.1.tar.gz 2198467 BLAKE2B 79178efe1acfa36af3f41b31610a5add8bd9c35529931415ab45cc1588c4fea477ddf0cd67c5d799de688db049fe2f3ce776c5e5da2e1dde1c329efc44d51ec0 SHA512 a24d880dd8174241b2808424fd3364f8b0ba9865109704981ad68f383d983bab9f8e8923942423e70b0a3af6951d16f5712647692a8ca3bef4347057f3536cc6 +DIST rules_apple.2.3.0.tar.gz 2222651 BLAKE2B a0a0e153eaa16be0ace362c1738b3f5ff87a3e76bd6fbf65c7d76e68d33b40d99e86045cf0bb104644fbf076024dcee4082303b823e12cbce6675202d93c29d5 SHA512 e8f6c164451a28ad70a9fdabfc2540a0165de1d23e2bdb7ec06ddf2e455f55cf82b8024be0e88153fca44433ae84144cc4054974737c8f6374fc88de37338394 DIST ruy-3286a34cc8de6149ac6844107dfdffac91531e72.zip 381045 BLAKE2B 00cf2a009ff6cac8e3e8c3380c3fdb4fe9299614eba56bfbf0b2b8dd908ec2ec7d58b8185810899dd8bac80cc990d69a26e01eed8562f73c5fc08b8b3ad198e0 SHA512 4acb4dcee41788c3f2a65a4335f64d20057980289f231a401ea45c27dcd16bc2e48d0748d6ad35e77c3212104c54353193d4710260993ae8927dce24a6ef435e DIST ruy-841ea4172ba904fe3536789497f9565f2ef64129.zip 379664 BLAKE2B 82f54b4e7959ca2ff489cf0eaa7c01c5084b11174a43e2caa8f30dcd3951fb9552e513fa0488190fa73dde62719bfd8e4be59bd264fe316ec5b9852db2494ed2 SHA512 e10bed1901eb53cc0174d8723b67b7ff4f7021b5d94e8e7596879a9a625e77948f265d430b5c56f7789030874ba08bdb5263796212d9b60affd1a20694ec3317 DIST sobol_data-835a7d7b1ee3bc83e575e302a985c66ec4b65249.tar.gz 2569603 BLAKE2B cd66dc8a0b4ad3ea586a79ef588c8d68e4f05b6ea89e6d8119b5ee338b9046c11a747ca57c2f80a3c90fab119c05e85f5965571c9e522ccb8a312b95804d1a36 SHA512 b819d050bb38246b503f1eb3d6e7d878d0f166f3602ae3c327749b1f6ddc2b1d6ac7f768b1f1d055b98b7b98812e4643a75bcebc9728c7f615d67ca739761b3e @@ -69,8 +81,10 @@ DIST tensorflow-1.15.0-python-license.rst.txt 45132 BLAKE2B 8a8ace4b64fd9eda1932 DIST tensorflow-2.11.0.tar.gz 67996769 BLAKE2B 539a8d7084280023c7b019a079aad6bdf0fa94b22276250a02913fb0ad496b4af2115276152b4f37101547404b616de58f209b9d1036e5d4dd9b0f7072a59ba9 SHA512 cda16db72a0ede72ac9f5e76c3a745ea9d72421fa40021303032f8fc3ac2755f64524f97a4629c18cf888f259027439b49ec921e0f5fd329a6ba060235a658d5 DIST tensorflow-2.12.0.tar.gz 69710128 BLAKE2B 582b7b2717edd0ce41ecd74525fd38684d375cf1833c206cb53fa10ef964bb41ef8f29fa947a1f77e892bb68793d53c567bc9c4c9124dba94913f57ddcd3b1f9 SHA512 9273720b5be08e5d3dc76aafa4af6b27a2d50afd02b181e7632f3d70961995b2e0e5acb13e70c9b704ef475617c23d70047fbe74d5b63b156cf8f2fa8a856b84 DIST tensorflow-2.13.1.tar.gz 70663056 BLAKE2B fcaff251ae3757f7e20deb07566485ca243f943ce118886edcfea3c41aff8baf83b98e5e3eea97c5243cb5db2e7418ec58687b3099dffd5c750395b554689343 SHA512 4f7bae717444a34704cb53466051a5708be13342a193be14914fbddd4790e327f3b6c37063a36d1c7835c50cf99500895aaffc646fdb9b421e6b873dfe4b2e46 +DIST tensorflow-2.14.1.tar.gz 72760001 BLAKE2B dd443c087bbe524b8b6dd6e9f08ec1c7bbc406e2ae7f854573fd29212004f927daaf8115a66f1c11c97da2e6f40a44ccb0e4f8e28455e1bf94872d630277e4bf SHA512 
c5e9a176027a00b5efb1343bee000330f56229a1a8559db2fb9e2c9388afaf8420d69b6fd6e7b85811272c110245315935232a859e9fd4106b29b226780c447e DIST tensorflow-patches-2.11.0.tar.bz2 2977 BLAKE2B 53672704ccfc5291f7070421af9f7246d2f211689b18f35917d4d166ff5e9ddb623db4dd9dc8054e0f2262b162dd8c2216446c6ca5e2bf538872debf8eb8aec1 SHA512 866c6abb78934c1a645ab3172f93d81423e2023fa1e8688255ef0777e340d810a6889c838b841be765f0897f7a269c4d6cb52b8f59af114bf5b9e181b1348590 DIST tensorflow-patches-2.12.0.tar.bz2 4194 BLAKE2B b61efaf0ade6ef88b5abb858a84b537e02ff9fcd032a2a7f68a6467e53511a50fff66ef7e1096f343a8909e165b1b76146cb6a8db8e1974eeecf2cbf0b6a71a0 SHA512 2f931fd4b995d33300d392f7dafd6dd23671772f733c28faed239d01e9b032967afb17cab50908fa38956e2cde479a13dfdc632e622d918fe55d281aa9b3dc4e DIST tensorflow-runtime-4ce3e4da2e21ae4dfcee9366415e55f408c884ec.tar.gz 15313054 BLAKE2B 316da579b93d83bca43d51198dc65dea12972d73f019a5b78fe53162966e022d21d4225ba4a7786d1a0f376550a1052c59858df04b958768962b88d64d3c5083 SHA512 ea490ebc8a5eef4a7ce6185c19e3b1305fd886c8145ef54387076f458bfec56a8a33452728206afa67001273920f6958317c8c4289e32ac6fea432e15a2502c5 +DIST tensorflow-runtime-769f5cc9b8732933140b09e8808d13614182b496.tar.gz 15183227 BLAKE2B 3c9a3f256db2cd4ff74318da2fc42dbe98669142cc0ea567ac29df4e2faea6e6bc55508f8ec555a88d19bc064123f80e9809affd64628dd9483adfa0dac41aca SHA512 d505278cc7b82f1b1b3c0588e654e64cd63824c920b0b3c93b778ec1f46f005d17e922ee24dde9cb78714f0a2b22c7038f73273d94c46360b7aca92cb5ad61a3 DIST tensorflow-runtime-7d879c8b161085a4374ea481b93a52adb19c0529.tar.gz 15228644 BLAKE2B e621ece4bbe3139661ef48c628459118eb2078151907630d6fde4086bd73f09af2ab0bb1c43ccf81d84230e3bb3be617e505f76c5d4333fee9adece58e4f4042 SHA512 f79f1e0a44a60cd064e21461380dfd5eb47a8912064f238da4ea94c8c8c94a680e438ff2b202bd0c81049e104293b5bbbcdfb604cf9ebecf6e6bf34d6782b0f5 DIST tensorflow-runtime-91d765cad5599f9710973d3e34d4dc22583e2e79.tar.gz 15226589 BLAKE2B 5a00d0f884c86f85a25aba8f7d9eee509f35c114e9bfa24ce3effe9437bc549a4a7f013b03b515fbb4a217724a2e2abca96300fba64560b0d0e4fdb05fb9c3ac SHA512 b2fc8a240de1a13fade8628358f410b8f6e1bfde9d2cec3765b62d2ee7eb143c168687a23cb79c7aecd19a668d2d3be46fba361d16ad29b722fe19004aa3c5a2 diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch new file mode 100644 index 000000000000..9e93b3d5b8f3 --- /dev/null +++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch @@ -0,0 +1,37 @@ +From 0d3f532325cc39eb816e94e5bae259ea5a5a2304 Mon Sep 17 00:00:00 2001 +From: wangjiezhe <wangjiezhe@gmail.com> +Date: Fri, 24 Nov 2023 16:34:44 +0800 +Subject: [PATCH 01/13] WORKSPACE: add rules-docker http_archive, + bazel-toolchains uses git_repo + +git_repository() rules cannot pull from --distdir and fail when building +without internet access. Use http_archive instead and pin the sha256 +hash as well. 
+---
+ WORKSPACE | 11 +++++++++++
+ 1 file changed, 11 insertions(+)
+
+diff --git a/WORKSPACE b/WORKSPACE
+index fb3af8a2bea..644b731b1dc 100644
+--- a/WORKSPACE
++++ b/WORKSPACE
+@@ -1,5 +1,16 @@
+ workspace(name = "org_tensorflow")
+
++load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
++
++http_archive(
++ name = "io_bazel_rules_docker",
++ sha256 = "7d453450e1eb70e238eea6b31f4115607ec1200e91afea01c25f9804f37e39c8",
++ strip_prefix = "rules_docker-0.10.0",
++ urls = [
++ "https://github.com/bazelbuild/rules_docker/releases/download/v0.10.0/rules_docker-v0.10.0.tar.gz",
++ ],
++)
++
+ # We must initialize hermetic python first.
+ load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch
new file mode 100644
index 000000000000..5436744e1275
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch
@@ -0,0 +1,32 @@
+From 33b11df0767ead9a64a65e3ae19e329bba91dd75 Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Sun, 6 Feb 2022 00:13:56 -0800
+Subject: [PATCH 02/13] systemlib: Latest absl LTS has split cord libs
+
+---
+ third_party/absl/system.absl.strings.BUILD | 9 ++++++++-
+ 1 file changed, 8 insertions(+), 1 deletion(-)
+
+diff --git a/third_party/absl/system.absl.strings.BUILD b/third_party/absl/system.absl.strings.BUILD
+index fa9a7a84f67..63bac99d71b 100644
+--- a/third_party/absl/system.absl.strings.BUILD
++++ b/third_party/absl/system.absl.strings.BUILD
+@@ -26,7 +26,14 @@ cc_library(
+
+ cc_library(
+ name = "cord",
+- linkopts = ["-labsl_cord"],
++ linkopts = [
++ "-labsl_cord",
++ "-labsl_cord_internal",
++ "-labsl_cordz_functions",
++ "-labsl_cordz_handle",
++ "-labsl_cordz_info",
++ "-labsl_cordz_sample_token",
++ ],
+ deps = [
+ ":str_format",
+ "//absl/container:compressed_tuple",
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch
new file mode 100644
index 000000000000..8cff4a422ee3
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch
@@ -0,0 +1,29 @@
+From e098854ed15caa864b83033a1bc6b1aa7ca93a5c Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Mon, 5 Sep 2022 12:52:44 -0700
+Subject: [PATCH 03/13] mkl_dnn: Must link against libm for round and log2
+
+---
+ third_party/mkl_dnn/mkldnn_v1.BUILD | 6 +++---
+ 1 file changed, 3 insertions(+), 3 deletions(-)
+
+diff --git a/third_party/mkl_dnn/mkldnn_v1.BUILD b/third_party/mkl_dnn/mkldnn_v1.BUILD
+index 263c64eb681..f1860b1e7c3 100644
+--- a/third_party/mkl_dnn/mkldnn_v1.BUILD
++++ b/third_party/mkl_dnn/mkldnn_v1.BUILD
+@@ -165,9 +165,9 @@ cc_library(
+ includes = _INCLUDES_LIST,
+ # TODO(penpornk): Use lrt_if_needed from tensorflow.bzl instead.
+ linkopts = select({
+- "@org_tensorflow//tensorflow/tsl:linux_aarch64": ["-lrt"],
+- "@org_tensorflow//tensorflow/tsl:linux_x86_64": ["-lrt"],
+- "@org_tensorflow//tensorflow/tsl:linux_ppc64le": ["-lrt"],
++ "@org_tensorflow//tensorflow/tsl:linux_aarch64": ["-lrt", "-lm"],
++ "@org_tensorflow//tensorflow/tsl:linux_x86_64": ["-lrt", "-lm"],
++ "@org_tensorflow//tensorflow/tsl:linux_ppc64le": ["-lrt", "-lm"],
+ "//conditions:default": [],
+ }),
+ textual_hdrs = _TEXTUAL_HDRS_LIST,
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch
new file mode 100644
index 000000000000..0fa4d02d4c62
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch
@@ -0,0 +1,35 @@
+From e6645115b8a838b40a49c73cb948dc373c5e98c8 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Fri, 24 Nov 2023 16:42:48 +0800
+Subject: [PATCH 04/13] tensorflow_cc: Add systemlib nsync linkopts
+
+Linkopts dont get propagated up to the shared library correctly so
+workaround by applying them directly
+---
+ tensorflow/BUILD | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+
+diff --git a/tensorflow/BUILD b/tensorflow/BUILD
+index 202553cd531..63ce1e7b385 100644
+--- a/tensorflow/BUILD
++++ b/tensorflow/BUILD
+@@ -39,6 +39,7 @@ load(
+ "tf_cc_shared_library",
+ )
+ load("@local_config_cuda//cuda:build_defs.bzl", "if_cuda")
++load("@local_config_syslibs//:build_defs.bzl", "if_system_lib")
+
+ # copybara:uncomment_begin
+ # load("//devtools/copybara/rules:copybara.bzl", "copybara_config_test")
+@@ -1312,7 +1313,7 @@ tf_cc_shared_library(
+ "-z defs",
+ "-Wl,--version-script,$(location //tensorflow:tf_version_script.lds)",
+ ],
+- }),
++ }) + if_system_lib("nsync", ["-lnsync_cpp"]),
+ per_os_targets = True,
+ roots = [
+ "//tensorflow/c:c_api",
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch
new file mode 100644
index 000000000000..7dadd35bc2b7
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch
@@ -0,0 +1,71 @@
+From c390554addb171439310c00dce2972539ac0e71d Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Tue, 30 May 2023 09:10:03 -0700
+Subject: [PATCH 05/13] systemlib: Updates for Abseil 20220623 LTS
+
+These targets are header-only and just need stub bazel targets
+---
+ third_party/absl/system.absl.functional.BUILD | 22 +++++++++++++++++++
+ third_party/absl/system.absl.random.BUILD | 12 ++++++++++
+ 2 files changed, 34 insertions(+)
+
+diff --git a/third_party/absl/system.absl.functional.BUILD b/third_party/absl/system.absl.functional.BUILD
+index a4f70acf35c..579181dec07 100644
+--- a/third_party/absl/system.absl.functional.BUILD
++++ b/third_party/absl/system.absl.functional.BUILD
+@@ -2,10 +2,32 @@ load("@rules_cc//cc:defs.bzl", "cc_library")
+
+ package(default_visibility = ["//visibility:public"])
+
++cc_library(
++ name = "any_invocable",
++ deps = [
++ "//absl/base:base_internal",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/meta:type_traits",
++ "//absl/utility",
++ ],
++)
++
+ cc_library(
+ name = "bind_front",
++ deps = [
++ "//absl/base:base_internal",
++
"//absl/container:compressed_tuple", ++ "//absl/meta:type_traits", ++ "//absl/utility", ++ ], + ) + + cc_library( + name = "function_ref", ++ deps = [ ++ "//absl/base:base_internal", ++ "//absl/base:core_headers", ++ "//absl/meta:type_traits", ++ ], + ) +diff --git a/third_party/absl/system.absl.random.BUILD b/third_party/absl/system.absl.random.BUILD +index 948de07751a..5ebd656be8e 100644 +--- a/third_party/absl/system.absl.random.BUILD ++++ b/third_party/absl/system.absl.random.BUILD +@@ -51,3 +51,15 @@ cc_library( + "//absl/types:span", + ], + ) ++ ++cc_library( ++ name = "bit_gen_ref", ++ deps = [ ++ ":random", ++ "//absl/base:core_headers", ++ "//absl/base:fast_type_id", ++ "//absl/meta:type_traits", ++ "//absl/random/internal:distribution_caller", ++ "//absl/random/internal:fast_uniform_bits", ++ ], ++) +-- +2.41.0 + diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0006-systemlib-Update-targets-for-absl_py.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0006-systemlib-Update-targets-for-absl_py.patch new file mode 100644 index 000000000000..fa021358998c --- /dev/null +++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0006-systemlib-Update-targets-for-absl_py.patch @@ -0,0 +1,24 @@ +From d2dc4d308a83cb2d1620e7c5213ec570fe3138af Mon Sep 17 00:00:00 2001 +From: wangjiezhe <wangjiezhe@gmail.com> +Date: Fri, 24 Nov 2023 16:48:15 +0800 +Subject: [PATCH 06/13] systemlib: Update targets for absl_py + +--- + third_party/systemlibs/absl_py.absl.flags.BUILD | 3 +++ + 1 file changed, 3 insertions(+) + +diff --git a/third_party/systemlibs/absl_py.absl.flags.BUILD b/third_party/systemlibs/absl_py.absl.flags.BUILD +index d92f4949df1..614938fb8c4 100644 +--- a/third_party/systemlibs/absl_py.absl.flags.BUILD ++++ b/third_party/systemlibs/absl_py.absl.flags.BUILD +@@ -8,4 +8,7 @@ py_library( + + py_library( + name = "argparse_flags", ++ deps = [ ++ ":flags", ++ ], + ) +-- +2.41.0 + diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0007-systemlib-Add-well_known_types_py_pb2-target.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0007-systemlib-Add-well_known_types_py_pb2-target.patch new file mode 100644 index 000000000000..655be6bc919f --- /dev/null +++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0007-systemlib-Add-well_known_types_py_pb2-target.patch @@ -0,0 +1,28 @@ +From e58f5674af07a3853e59c32b92d91e590b0224e2 Mon Sep 17 00:00:00 2001 +From: Jason Zaman <jason@perfinion.com> +Date: Sat, 3 Jun 2023 16:23:51 -0700 +Subject: [PATCH 07/13] systemlib: Add well_known_types_py_pb2 target + +Bug: https://github.com/tensorflow/tensorflow/issues/60667 +--- + third_party/systemlibs/protobuf.BUILD | 7 +++++++ + 1 file changed, 7 insertions(+) + +diff --git a/third_party/systemlibs/protobuf.BUILD b/third_party/systemlibs/protobuf.BUILD +index 4d05ab28d12..b3d72b0e3ad 100644 +--- a/third_party/systemlibs/protobuf.BUILD ++++ b/third_party/systemlibs/protobuf.BUILD +@@ -111,3 +111,10 @@ py_library( + visibility = ["//visibility:public"], + deps = [dep + "_proto" for dep in proto[1][1]], + ) for proto in WELL_KNOWN_PROTO_MAP.items()] ++ ++py_proto_library( ++ name = "well_known_types_py_pb2", ++ include = ".", ++ srcs = [proto[1][0] for proto in WELL_KNOWN_PROTO_MAP.items()], ++ visibility = ["//visibility:public"], ++) +-- +2.41.0 + diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0008-Relax-setup.py-version-requirements.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0008-Relax-setup.py-version-requirements.patch new file mode 100644 index 000000000000..5d1667d75e11 --- 
/dev/null +++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0008-Relax-setup.py-version-requirements.patch @@ -0,0 +1,38 @@ +From e6cecad5c2595cb1166a78b698377f12da6e7a09 Mon Sep 17 00:00:00 2001 +From: wangjiezhe <wangjiezhe@gmail.com> +Date: Fri, 24 Nov 2023 16:54:18 +0800 +Subject: [PATCH 08/13] Relax setup.py version requirements + +--- + tensorflow/tools/pip_package/setup.py | 8 ++++---- + 1 file changed, 4 insertions(+), 4 deletions(-) + +diff --git a/tensorflow/tools/pip_package/setup.py b/tensorflow/tools/pip_package/setup.py +index fdb718d1628..3897d5316ba 100644 +--- a/tensorflow/tools/pip_package/setup.py ++++ b/tensorflow/tools/pip_package/setup.py +@@ -101,8 +101,8 @@ REQUIRED_PACKAGES = [ + 'six >= 1.12.0', + 'termcolor >= 1.1.0', + 'typing_extensions >= 3.6.6', +- 'wrapt >= 1.11.0, < 1.15', +- 'tensorflow-io-gcs-filesystem >= 0.23.1', ++ 'wrapt >= 1.11.0', ++ # 'tensorflow-io-gcs-filesystem >= 0.23.1', + # grpcio does not build correctly on big-endian machines due to lack of + # BoringSSL support. + # See https://github.com/tensorflow/tensorflow/issues/17882. +@@ -140,8 +140,8 @@ FAKE_REQUIRED_PACKAGES = [ + _VERSION + ';platform_system=="Windows"', + ] + +-if platform.system() == 'Linux' and platform.machine() == 'x86_64': +- REQUIRED_PACKAGES.append(FAKE_REQUIRED_PACKAGES) ++# if platform.system() == 'Linux' and platform.machine() == 'x86_64': ++# REQUIRED_PACKAGES.append(FAKE_REQUIRED_PACKAGES) + + if collaborator_build: + # If this is a collaborator build, then build an "installer" wheel and +-- +2.41.0 + diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0009-systemlib-update-targets-for-absl.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0009-systemlib-update-targets-for-absl.patch new file mode 100644 index 000000000000..6b946461fba6 --- /dev/null +++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0009-systemlib-update-targets-for-absl.patch @@ -0,0 +1,365 @@ +From 1a72b50ed5054cb025c0aa2a39ce2499417f2d76 Mon Sep 17 00:00:00 2001 +From: wangjiezhe <wangjiezhe@gmail.com> +Date: Sun, 26 Nov 2023 13:12:20 +0800 +Subject: [PATCH 09/13] systemlib: update targets for absl + +--- + .../compiler/mlir/tools/kernel_gen/BUILD | 1 + + .../distribute/experimental/rpc/kernels/BUILD | 1 + + third_party/absl/system.absl.debugging.BUILD | 20 +- + third_party/absl/system.absl.log.BUILD | 271 ++++++++++++++++++ + third_party/absl/workspace.bzl | 1 + + 5 files changed, 289 insertions(+), 5 deletions(-) + create mode 100644 third_party/absl/system.absl.log.BUILD + +diff --git a/tensorflow/compiler/mlir/tools/kernel_gen/BUILD b/tensorflow/compiler/mlir/tools/kernel_gen/BUILD +index 71d85d2c96e..f4a479a9daf 100644 +--- a/tensorflow/compiler/mlir/tools/kernel_gen/BUILD ++++ b/tensorflow/compiler/mlir/tools/kernel_gen/BUILD +@@ -107,6 +107,7 @@ tf_cc_binary( + "//tensorflow/compiler/mlir:init_mlir", + "//tensorflow/compiler/mlir/tensorflow", + "//tensorflow/core:lib", ++ "@com_google_absl//absl/log:check", + "@com_google_absl//absl/strings", + "@llvm-project//llvm:AArch64CodeGen", # fixdeps: keep + "@llvm-project//llvm:ARMCodeGen", # fixdeps: keep +diff --git a/tensorflow/distribute/experimental/rpc/kernels/BUILD b/tensorflow/distribute/experimental/rpc/kernels/BUILD +index f9a525364c5..8b7f7b54761 100644 +--- a/tensorflow/distribute/experimental/rpc/kernels/BUILD ++++ b/tensorflow/distribute/experimental/rpc/kernels/BUILD +@@ -65,6 +65,7 @@ tf_kernel_library( + "//tensorflow/distribute/experimental/rpc/proto:tf_rpc_service_proto_cc", + "@com_github_grpc_grpc//:grpc++", + 
"@com_google_absl//absl/status", ++ "@com_google_absl//absl/log:check", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/strings:str_format", + ], +diff --git a/third_party/absl/system.absl.debugging.BUILD b/third_party/absl/system.absl.debugging.BUILD +index 931ffdc9e92..223db7b4c46 100644 +--- a/third_party/absl/system.absl.debugging.BUILD ++++ b/third_party/absl/system.absl.debugging.BUILD +@@ -26,15 +26,25 @@ cc_library( + + cc_library( + name = "failure_signal_handler", +- linkopts = [ +- "-labsl_failure_signal_handler", +- "-labsl_examine_stack", ++ linkopts = ["-labsl_failure_signal_handler"], ++ deps = [ ++ ":examine_stack", ++ ":stacktrace", ++ "//absl/base", ++ "//absl/base:config", ++ "//absl/base:core_headers", ++ "//absl/base:raw_logging_internal", + ], ++) ++ ++cc_library( ++ name = "examine_stack", ++ linkopts = ["-labsl_examine_stack"], + deps = [ + ":stacktrace", + ":symbolize", +- "//absl/base", +- "//absl/base:errno_saver", ++ "//absl/base:config", ++ "//absl/base:core_headers", + "//absl/base:raw_logging_internal", + ], + ) +diff --git a/third_party/absl/system.absl.log.BUILD b/third_party/absl/system.absl.log.BUILD +new file mode 100644 +index 00000000000..9a2a5de657e +--- /dev/null ++++ b/third_party/absl/system.absl.log.BUILD +@@ -0,0 +1,271 @@ ++load("@rules_cc//cc:defs.bzl", "cc_library") ++ ++package(default_visibility = ["//visibility:public"]) ++ ++cc_library( ++ name = "log", ++ deps = [ ++ "//absl/log:internal_log_impl", ++ ], ++) ++ ++cc_library( ++ name = "internal_log_impl", ++ deps = [ ++ ":internal_conditions", ++ ":internal_message", ++ ":internal_strip", ++ ], ++) ++ ++cc_library( ++ name = "internal_conditions", ++ linkopts = ["-labsl_log_internal_conditions"], ++ deps = [ ++ ":internal_voidify", ++ "//absl/base", ++ "//absl/base:config", ++ "//absl/base:core_headers", ++ ], ++) ++ ++cc_library( ++ name = "internal_voidify", ++ deps = ["//absl/base:config"], ++) ++ ++cc_library( ++ name = "internal_message", ++ linkopts = ["-labsl_log_internal_message"], ++ deps = [ ++ ":entry", ++ ":globals", ++ ":internal_append_truncated", ++ ":internal_format", ++ ":internal_globals", ++ ":internal_log_sink_set", ++ ":internal_nullguard", ++ ":internal_proto", ++ ":severity", ++ ":sink", ++ ":sink_registry", ++ "//absl/base", ++ "//absl/base:config", ++ "//absl/base:core_headers", ++ "//absl/base:errno_saver", ++ "//absl/base:raw_logging_internal", ++ "//absl/base:strerror", ++ "//absl/container:inlined_vector", ++ "//absl/debugging:examine_stack", ++ "//absl/memory", ++ "//absl/strings", ++ "//absl/time", ++ "//absl/types:span", ++ ], ++) ++ ++cc_library( ++ name = "internal_append_truncated", ++ deps = [ ++ "//absl/base:config", ++ "//absl/strings", ++ "//absl/types:span", ++ ], ++) ++ ++cc_library( ++ name = "internal_format", ++ linkopts = ["-labsl_log_internal_format"], ++ deps = [ ++ ":internal_append_truncated", ++ ":internal_config", ++ ":internal_globals", ++ ":severity", ++ "//absl/base:config", ++ "//absl/base:core_headers", ++ "//absl/strings", ++ "//absl/strings:str_format", ++ "//absl/time", ++ "//absl/types:span", ++ ], ++) ++ ++cc_library( ++ name = "internal_globals", ++ linkopts = ["-labsl_log_internal_globals"], ++ deps = [ ++ ":severity", ++ "//absl/base:config", ++ "//absl/base:core_headers", ++ "//absl/base:raw_logging_internal", ++ "//absl/strings", ++ "//absl/time", ++ ], ++) ++ ++cc_library( ++ name = "internal_proto", ++ linkopts = ["-labsl_log_internal_proto"], ++ deps = [ ++ "//absl/base", ++ "//absl/base:config", ++ 
"//absl/base:core_headers", ++ "//absl/strings", ++ "//absl/types:span", ++ ], ++) ++ ++cc_library( ++ name = "internal_log_sink_set", ++ linkopts = ["-labsl_log_internal_log_sink_set"], ++ deps = [ ++ ":entry", ++ ":globals", ++ ":internal_config", ++ ":internal_globals", ++ ":severity", ++ ":sink", ++ "//absl/base", ++ "//absl/base:config", ++ "//absl/base:core_headers", ++ "//absl/base:raw_logging_internal", ++ "//absl/cleanup", ++ "//absl/strings", ++ "//absl/synchronization", ++ "//absl/types:span", ++ ], ++) ++ ++cc_library( ++ name = "internal_config", ++ deps = [ ++ "//absl/base:config", ++ "//absl/base:core_headers", ++ ], ++) ++ ++cc_library( ++ name = "internal_nullguard", ++ linkopts = ["-labsl_log_internal_nullguard"], ++ deps = [ ++ "//absl/base:config", ++ "//absl/base:core_headers", ++ ], ++) ++ ++cc_library( ++ name = "globals", ++ linkopts = ["-labsl_log_globals"], ++ deps = [ ++ ":severity", ++ "//absl/base:atomic_hook", ++ "//absl/base:config", ++ "//absl/base:core_headers", ++ "//absl/base:raw_logging_internal", ++ "//absl/hash", ++ "//absl/strings", ++ ], ++) ++ ++cc_library( ++ name = "entry", ++ linkopts = ["-labsl_log_entry"], ++ deps = [ ++ ":internal_config", ++ ":severity", ++ "//absl/base:config", ++ "//absl/base:core_headers", ++ "//absl/strings", ++ "//absl/time", ++ "//absl/types:span", ++ ], ++) ++ ++cc_library( ++ name = "severity", ++ linkopts = ["-labsl_log_severity"], ++ deps = ["//absl/base:core_headers"], ++) ++ ++cc_library( ++ name = "sink", ++ linkopts = ["-labsl_log_sink"], ++ deps = [ ++ ":entry", ++ "//absl/base:config", ++ ], ++) ++ ++cc_library( ++ name = "sink_registry", ++ deps = [ ++ ":internal_log_sink_set", ++ ":sink", ++ "//absl/base:config", ++ ], ++) ++ ++cc_library( ++ name = "internal_strip", ++ deps = [ ++ ":internal_message", ++ ":internal_nullstream", ++ ":severity", ++ ], ++) ++ ++cc_library( ++ name = "internal_nullstream", ++ deps = [ ++ ":severity", ++ "//absl/base:config", ++ "//absl/base:core_headers", ++ "//absl/strings", ++ ], ++) ++ ++cc_library( ++ name = "check", ++ deps = [ ++ ":internal_check_impl", ++ ":internal_check_op", ++ ":internal_conditions", ++ ":internal_message", ++ ":internal_strip", ++ "//absl/base:core_headers", ++ ], ++) ++ ++cc_library( ++ name = "internal_check_impl", ++ deps = [ ++ ":internal_check_op", ++ ":internal_conditions", ++ ":internal_message", ++ ":internal_strip", ++ "//absl/base:core_headers", ++ ], ++) ++ ++cc_library( ++ name = "internal_check_op", ++ linkopts = ["-labsl_log_internal_check_op"], ++ deps = [ ++ ":internal_nullguard", ++ ":internal_nullstream", ++ ":internal_strip", ++ "//absl/base:config", ++ "//absl/base:core_headers", ++ "//absl/strings", ++ ], ++) ++ ++cc_library( ++ name = "absl_check", ++ deps = [":internal_check_impl"], ++) ++ ++cc_library( ++ name = "absl_log", ++ deps = [":internal_log_impl"], ++) +diff --git a/third_party/absl/workspace.bzl b/third_party/absl/workspace.bzl +index 07f49cebb78..a7f4e5ffc44 100644 +--- a/third_party/absl/workspace.bzl ++++ b/third_party/absl/workspace.bzl +@@ -20,6 +20,7 @@ def repo(): + "flags", + "functional", + "hash", ++ "log", + "memory", + "meta", + "numeric", +-- +2.41.0 + diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0010-systemlib-fix-missing-osx-in-pybind11.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0010-systemlib-fix-missing-osx-in-pybind11.patch new file mode 100644 index 000000000000..24b7cf4eec90 --- /dev/null +++ 
b/sci-libs/tensorflow/files/tensorflow-2.14.1-0010-systemlib-fix-missing-osx-in-pybind11.patch @@ -0,0 +1,25 @@ +From ce5e7c9b7f0a667514a65dc58ca67b61fa591c8f Mon Sep 17 00:00:00 2001 +From: wangjiezhe <wangjiezhe@gmail.com> +Date: Thu, 21 Dec 2023 22:22:35 +0800 +Subject: [PATCH 10/13] systemlib: fix missing `:osx` in pybind11 + +--- + third_party/systemlibs/pybind11.BUILD | 5 +++++ + 1 file changed, 5 insertions(+) + +diff --git a/third_party/systemlibs/pybind11.BUILD b/third_party/systemlibs/pybind11.BUILD +index 79a483d7b5d..cda63fbd019 100644 +--- a/third_party/systemlibs/pybind11.BUILD ++++ b/third_party/systemlibs/pybind11.BUILD +@@ -6,3 +6,8 @@ cc_library( + "@org_tensorflow//third_party/python_runtime:headers", + ], + ) ++ ++config_setting( ++ name = "osx", ++ constraint_values = ["@platforms//os:osx"], ++) +-- +2.41.0 + diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0011-systemlib-fix-missing-LICENSE-in-flatbuffers.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0011-systemlib-fix-missing-LICENSE-in-flatbuffers.patch new file mode 100644 index 000000000000..acd46106115f --- /dev/null +++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0011-systemlib-fix-missing-LICENSE-in-flatbuffers.patch @@ -0,0 +1,25 @@ +From 084723bca84ba51f7f67209618b5a4e064c1576a Mon Sep 17 00:00:00 2001 +From: wangjiezhe <wangjiezhe@gmail.com> +Date: Thu, 21 Dec 2023 22:24:24 +0800 +Subject: [PATCH 11/13] systemlib: fix missing `LICENSE` in flatbuffers + +--- + third_party/flatbuffers/BUILD.system | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/third_party/flatbuffers/BUILD.system b/third_party/flatbuffers/BUILD.system +index 8fe4d7a5907..b1d63b4ca0f 100644 +--- a/third_party/flatbuffers/BUILD.system ++++ b/third_party/flatbuffers/BUILD.system +@@ -1,7 +1,7 @@ + licenses(["notice"]) # Apache 2.0 + + filegroup( +- name = "LICENSE.txt", ++ name = "LICENSE", + visibility = ["//visibility:public"], + ) + +-- +2.41.0 + diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0012-build-use-non-hermetic-python.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0012-build-use-non-hermetic-python.patch new file mode 100644 index 000000000000..67108a290e13 --- /dev/null +++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0012-build-use-non-hermetic-python.patch @@ -0,0 +1,2745 @@ +From 3f0e4685b47f71c80b18bc5b6cba1afd56070604 Mon Sep 17 00:00:00 2001 +From: wangjiezhe <wangjiezhe@gmail.com> +Date: Thu, 21 Dec 2023 22:25:46 +0800 +Subject: [PATCH 12/13] build: use non-hermetic python + +--- + WORKSPACE | 65 -- + tensorflow/BUILD | 2 - + tensorflow/compiler/mlir/glob_lit_test.bzl | 1 - + tensorflow/compiler/xla/glob_lit_test.bzl | 5 - + tensorflow/compiler/xla/mlir_hlo/tests/BUILD | 1 - + tensorflow/dtensor/python/tests/BUILD | 1 - + tensorflow/lite/python/BUILD | 1 - + tensorflow/python/BUILD | 1 - + tensorflow/python/compiler/tensorrt/BUILD | 1 - + .../experimental/kernel_tests/service/BUILD | 1 - + tensorflow/python/debug/lib/BUILD | 1 - + .../python/distribute/experimental/rpc/BUILD | 1 - + .../python/distribute/failure_handling/BUILD | 1 - + tensorflow/python/eager/BUILD | 1 - + tensorflow/python/estimator/BUILD | 5 +- + tensorflow/python/framework/BUILD | 2 - + tensorflow/python/keras/BUILD | 1 - + tensorflow/python/keras/engine/BUILD | 1 - + tensorflow/python/keras/saving/BUILD | 1 - + tensorflow/python/profiler/BUILD | 1 - + .../python/profiler/integration_test/BUILD | 1 - + tensorflow/python/summary/BUILD | 1 - + third_party/py/BUILD.tpl | 39 +- + 
third_party/py/{non_hermetic => }/README | 0 + third_party/py/non_hermetic/BUILD | 0 + third_party/py/non_hermetic/BUILD.tpl | 80 -- + third_party/py/non_hermetic/ml_dtypes/BUILD | 0 + third_party/py/non_hermetic/ml_dtypes/LICENSE | 202 ---- + .../py/non_hermetic/ml_dtypes/ml_dtypes.BUILD | 50 - + .../ml_dtypes/ml_dtypes.tests.BUILD | 60 -- + .../py/non_hermetic/ml_dtypes/workspace.bzl | 22 - + third_party/py/non_hermetic/numpy/BUILD | 21 - + third_party/py/non_hermetic/numpy/README.md | 4 - + .../py/non_hermetic/numpy/tf_numpy_api/BUILD | 12 - + ...ensorflow.experimental.numpy.ndarray.pbtxt | 51 - + .../tensorflow.experimental.numpy.pbtxt | 919 ------------------ + ...tensorflow.experimental.numpy.random.pbtxt | 35 - + .../py/non_hermetic/python_configure.bzl | 315 ------ + third_party/py/numpy/BUILD | 7 +- + third_party/py/numpy/LICENSE | 60 -- + .../tensorflow.experimental.numpy.pbtxt | 2 +- + third_party/py/python_configure.bzl | 252 ++++- + 42 files changed, 291 insertions(+), 1936 deletions(-) + rename third_party/py/{non_hermetic => }/README (100%) + delete mode 100644 third_party/py/non_hermetic/BUILD + delete mode 100644 third_party/py/non_hermetic/BUILD.tpl + delete mode 100644 third_party/py/non_hermetic/ml_dtypes/BUILD + delete mode 100644 third_party/py/non_hermetic/ml_dtypes/LICENSE + delete mode 100644 third_party/py/non_hermetic/ml_dtypes/ml_dtypes.BUILD + delete mode 100644 third_party/py/non_hermetic/ml_dtypes/ml_dtypes.tests.BUILD + delete mode 100644 third_party/py/non_hermetic/ml_dtypes/workspace.bzl + delete mode 100644 third_party/py/non_hermetic/numpy/BUILD + delete mode 100644 third_party/py/non_hermetic/numpy/README.md + delete mode 100644 third_party/py/non_hermetic/numpy/tf_numpy_api/BUILD + delete mode 100644 third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.ndarray.pbtxt + delete mode 100644 third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.pbtxt + delete mode 100644 third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.random.pbtxt + delete mode 100644 third_party/py/non_hermetic/python_configure.bzl + delete mode 100644 third_party/py/numpy/LICENSE + +diff --git a/WORKSPACE b/WORKSPACE +index 644b731b1dc..3626ae4e805 100644 +--- a/WORKSPACE ++++ b/WORKSPACE +@@ -11,71 +11,6 @@ http_archive( + ], + ) + +-# We must initialize hermetic python first. 
+-load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") +- +-http_archive( +- name = "bazel_skylib", +- sha256 = "74d544d96f4a5bb630d465ca8bbcfe231e3594e5aae57e1edbf17a6eb3ca2506", +- urls = [ +- "https://storage.googleapis.com/mirror.tensorflow.org/github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz", +- "https://github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz", +- ], +-) +- +-http_archive( +- name = "rules_python", +- sha256 = "84aec9e21cc56fbc7f1335035a71c850d1b9b5cc6ff497306f84cced9a769841", +- strip_prefix = "rules_python-0.23.1", +- url = "https://github.com/bazelbuild/rules_python/releases/download/0.23.1/rules_python-0.23.1.tar.gz", +-) +- +-load("@rules_python//python:repositories.bzl", "python_register_toolchains") +-load( +- "//tensorflow/tools/toolchains/python:python_repo.bzl", +- "python_repository", +-) +- +-python_repository(name = "python_version_repo") +- +-load("@python_version_repo//:py_version.bzl", "HERMETIC_PYTHON_VERSION") +- +-python_register_toolchains( +- name = "python", +- ignore_root_user_error = True, +- python_version = HERMETIC_PYTHON_VERSION, +-) +- +-load("@python//:defs.bzl", "interpreter") +-load("@rules_python//python:pip.bzl", "package_annotation", "pip_parse") +- +-NUMPY_ANNOTATIONS = { +- "numpy": package_annotation( +- additive_build_content = """\ +-filegroup( +- name = "includes", +- srcs = glob(["site-packages/numpy/core/include/**/*.h"]), +-) +-cc_library( +- name = "numpy_headers", +- hdrs = [":includes"], +- strip_include_prefix="site-packages/numpy/core/include/", +-) +-""", +- ), +-} +- +-pip_parse( +- name = "pypi", +- annotations = NUMPY_ANNOTATIONS, +- python_interpreter_target = interpreter, +- requirements = "//:requirements_lock_" + HERMETIC_PYTHON_VERSION.replace(".", "_") + ".txt", +-) +- +-load("@pypi//:requirements.bzl", "install_deps") +- +-install_deps() +- + # Initialize the TensorFlow repository and all dependencies. + # + # The cascade of load() statements and tf_workspace?() calls works around the +diff --git a/tensorflow/BUILD b/tensorflow/BUILD +index 63ce1e7b385..9573a982298 100644 +--- a/tensorflow/BUILD ++++ b/tensorflow/BUILD +@@ -1718,8 +1718,6 @@ py_library( + "//tensorflow/lite/python:lite", + "//tensorflow/lite/python/authoring", + "//tensorflow/python:no_contrib", +- "@pypi_keras//:pkg", +- "@pypi_tensorboard//:pkg", + ], + ) + # copybara:comment_end +diff --git a/tensorflow/compiler/mlir/glob_lit_test.bzl b/tensorflow/compiler/mlir/glob_lit_test.bzl +index e689b4c0b31..f65c86b727b 100644 +--- a/tensorflow/compiler/mlir/glob_lit_test.bzl ++++ b/tensorflow/compiler/mlir/glob_lit_test.bzl +@@ -58,7 +58,6 @@ def _run_lit_test(name, data, size, tags, driver, features, exec_properties): + "@llvm-project//llvm:count", + "@llvm-project//llvm:not", + ], +- deps = ["@pypi_lit//:pkg"], + size = size, + main = "lit.py", + exec_properties = exec_properties, +diff --git a/tensorflow/compiler/xla/glob_lit_test.bzl b/tensorflow/compiler/xla/glob_lit_test.bzl +index 44b838ccb0a..86200b24da1 100644 +--- a/tensorflow/compiler/xla/glob_lit_test.bzl ++++ b/tensorflow/compiler/xla/glob_lit_test.bzl +@@ -52,10 +52,6 @@ def _run_lit_test(name, data, size, tags, driver, features, exec_properties): + # can remove this logic. This is necessary to have these tests run on builds + # using Python 3.11, but also to not include `@pypi_lit` in standalone xla + # builds where it won't be found. 
+- deps = [] +- if xla_root_dir == "tensorflow/compiler/xla/": +- deps.append("@pypi_lit//:pkg") +- + native.py_test( + name = name, + srcs = ["@llvm-project//llvm:lit"], +@@ -69,7 +65,6 @@ def _run_lit_test(name, data, size, tags, driver, features, exec_properties): + "@llvm-project//llvm:count", + "@llvm-project//llvm:not", + ], +- deps = deps, + size = size, + main = "lit.py", + exec_properties = exec_properties, +diff --git a/tensorflow/compiler/xla/mlir_hlo/tests/BUILD b/tensorflow/compiler/xla/mlir_hlo/tests/BUILD +index 3b67c8fdbec..30a3c562f75 100644 +--- a/tensorflow/compiler/xla/mlir_hlo/tests/BUILD ++++ b/tensorflow/compiler/xla/mlir_hlo/tests/BUILD +@@ -26,7 +26,6 @@ package( + tags = [ + "nomsan", # The execution engine doesn't work with msan, see b/248097619. + ], +- deps = ["@pypi_lit//:pkg"], + ) + for src in glob(["**/*.mlir"]) + ] +diff --git a/tensorflow/dtensor/python/tests/BUILD b/tensorflow/dtensor/python/tests/BUILD +index 615baad3085..9b6c5839b03 100644 +--- a/tensorflow/dtensor/python/tests/BUILD ++++ b/tensorflow/dtensor/python/tests/BUILD +@@ -303,7 +303,6 @@ pytype_strict_library( + ":test_util", + "//tensorflow/python/platform:client_testlib", + "@absl_py//absl/flags", +- "@pypi_portpicker//:pkg", + ], + ) + +diff --git a/tensorflow/lite/python/BUILD b/tensorflow/lite/python/BUILD +index cf03dad0ee0..8b771ac20ae 100644 +--- a/tensorflow/lite/python/BUILD ++++ b/tensorflow/lite/python/BUILD +@@ -266,7 +266,6 @@ py_test( + "//tensorflow/python/framework:test_lib", + "//tensorflow/python/platform:client_testlib", + "//tensorflow/python/platform:resource_loader", +- "@pypi_jax//:pkg", + ], + ) + +diff --git a/tensorflow/python/BUILD b/tensorflow/python/BUILD +index c1b7eb7c0ea..4093f4c5c09 100644 +--- a/tensorflow/python/BUILD ++++ b/tensorflow/python/BUILD +@@ -550,7 +550,6 @@ py_strict_library( + deps = [ + ":keras_lib", + "//third_party/py/numpy", +- "@pypi_scipy//:pkg", + "@six_archive//:six", + ], + ) +diff --git a/tensorflow/python/compiler/tensorrt/BUILD b/tensorflow/python/compiler/tensorrt/BUILD +index f3fd845ff53..78a45f4ed25 100644 +--- a/tensorflow/python/compiler/tensorrt/BUILD ++++ b/tensorflow/python/compiler/tensorrt/BUILD +@@ -69,7 +69,6 @@ py_strict_library( + "//tensorflow/python/util:nest", + "//tensorflow/python/util:tf_export", + "//third_party/py/numpy", +- "@pypi_packaging//:pkg", + "@six_archive//:six", + ], + ) +diff --git a/tensorflow/python/data/experimental/kernel_tests/service/BUILD b/tensorflow/python/data/experimental/kernel_tests/service/BUILD +index 8d36d2e3637..2b8a8fd3654 100644 +--- a/tensorflow/python/data/experimental/kernel_tests/service/BUILD ++++ b/tensorflow/python/data/experimental/kernel_tests/service/BUILD +@@ -143,7 +143,6 @@ tf_py_strict_test( + "//tensorflow/python/ops:array_ops", + "//tensorflow/python/platform:client_testlib", + "@absl_py//absl/testing:parameterized", +- "@pypi_portpicker//:pkg", + ], + ) + +diff --git a/tensorflow/python/debug/lib/BUILD b/tensorflow/python/debug/lib/BUILD +index 37c99b30dd2..012e349dffc 100644 +--- a/tensorflow/python/debug/lib/BUILD ++++ b/tensorflow/python/debug/lib/BUILD +@@ -596,7 +596,6 @@ py_strict_library( + "//tensorflow/python/lib/io:lib", + "//tensorflow/python/ops:variables", + "//tensorflow/python/util:compat", +- "@pypi_portpicker//:pkg", + ], + ) + +diff --git a/tensorflow/python/distribute/experimental/rpc/BUILD b/tensorflow/python/distribute/experimental/rpc/BUILD +index 94855205c70..3b3e3f9aee3 100644 +--- a/tensorflow/python/distribute/experimental/rpc/BUILD ++++ 
b/tensorflow/python/distribute/experimental/rpc/BUILD +@@ -60,6 +60,5 @@ tf_py_strict_test( + "//tensorflow/python/ops:variables", + "//tensorflow/python/platform:client_testlib", + "//tensorflow/python/util:nest", +- "@pypi_portpicker//:pkg", + ], + ) +diff --git a/tensorflow/python/distribute/failure_handling/BUILD b/tensorflow/python/distribute/failure_handling/BUILD +index 77317019fee..df52d80552e 100644 +--- a/tensorflow/python/distribute/failure_handling/BUILD ++++ b/tensorflow/python/distribute/failure_handling/BUILD +@@ -47,7 +47,6 @@ py_strict_library( + deps = [ + "//tensorflow/python/eager:context", + "//tensorflow/python/platform:tf_logging", +- "@pypi_requests//:pkg", + "@six_archive//:six", + ], + ) +diff --git a/tensorflow/python/eager/BUILD b/tensorflow/python/eager/BUILD +index b7bc8350e13..dc5e0ae232f 100644 +--- a/tensorflow/python/eager/BUILD ++++ b/tensorflow/python/eager/BUILD +@@ -1167,7 +1167,6 @@ cuda_py_strict_test( + "//tensorflow/python/training:server_lib", + "//tensorflow/python/util:compat", + "@absl_py//absl/testing:parameterized", +- "@pypi_portpicker//:pkg", + ], + ) + +diff --git a/tensorflow/python/estimator/BUILD b/tensorflow/python/estimator/BUILD +index 73a7c2626bb..0952ccb3154 100644 +--- a/tensorflow/python/estimator/BUILD ++++ b/tensorflow/python/estimator/BUILD +@@ -380,7 +380,8 @@ py_library( + ], + ) + +-alias( ++py_library( + name = "expect_tensorflow_estimator_installed", +- actual = "@pypi_tensorflow_estimator//:pkg", ++ srcs_version = "PY3", ++ visibility = ["//visibility:public"], + ) +diff --git a/tensorflow/python/framework/BUILD b/tensorflow/python/framework/BUILD +index d8ce1f5c0bf..1100c23b562 100644 +--- a/tensorflow/python/framework/BUILD ++++ b/tensorflow/python/framework/BUILD +@@ -359,7 +359,6 @@ py_strict_library( + "//tensorflow/python/util:deprecation", + "//tensorflow/python/util:tf_export", + "//third_party/py/numpy", +- "@pypi_packaging//:pkg", + ] + if_xla_available([ + "//tensorflow/python:_pywrap_tfcompile", + ]), +@@ -2036,7 +2035,6 @@ py_strict_library( + "//tensorflow/python/util/protobuf", + "//third_party/py/numpy", + "@absl_py//absl/testing:parameterized", +- "@pypi_portpicker//:pkg", + ], + ) + +diff --git a/tensorflow/python/keras/BUILD b/tensorflow/python/keras/BUILD +index c271a5ef77a..d516853a13e 100755 +--- a/tensorflow/python/keras/BUILD ++++ b/tensorflow/python/keras/BUILD +@@ -42,7 +42,6 @@ py_library( + "//tensorflow/python/saved_model", + "//tensorflow/python/training", + "//tensorflow/python/util:nest", +- "@pypi_h5py//:pkg", + ], + ) + +diff --git a/tensorflow/python/keras/engine/BUILD b/tensorflow/python/keras/engine/BUILD +index 2098b1650bc..287b1a4aa91 100644 +--- a/tensorflow/python/keras/engine/BUILD ++++ b/tensorflow/python/keras/engine/BUILD +@@ -93,7 +93,6 @@ py_library( + "//tensorflow/python/util:tf_decorator", + "//tensorflow/python/util:tf_export", + "//tensorflow/tools/docs:doc_controls", +- "@pypi_h5py//:pkg", + ], + ) + +diff --git a/tensorflow/python/keras/saving/BUILD b/tensorflow/python/keras/saving/BUILD +index d7cb2ccb2fc..b40d979c82d 100644 +--- a/tensorflow/python/keras/saving/BUILD ++++ b/tensorflow/python/keras/saving/BUILD +@@ -53,6 +53,5 @@ py_library( + "//tensorflow/python/platform:tf_logging", + "//tensorflow/python/saved_model", + "//tensorflow/python/training:saver", +- "@pypi_h5py//:pkg", + ], + ) +diff --git a/tensorflow/python/profiler/BUILD b/tensorflow/python/profiler/BUILD +index b1cfd6ea10c..9413aeeab8b 100644 +--- a/tensorflow/python/profiler/BUILD ++++ 
b/tensorflow/python/profiler/BUILD +@@ -43,7 +43,6 @@ cuda_py_strict_test( + "//tensorflow/python/eager:test", + "//tensorflow/python/framework:errors", + "//tensorflow/python/framework:test_lib", +- "@pypi_portpicker//:pkg", + ], + ) + +diff --git a/tensorflow/python/profiler/integration_test/BUILD b/tensorflow/python/profiler/integration_test/BUILD +index b20698ea6ea..e7060e5a315 100644 +--- a/tensorflow/python/profiler/integration_test/BUILD ++++ b/tensorflow/python/profiler/integration_test/BUILD +@@ -35,6 +35,5 @@ cuda_py_strict_test( + "//tensorflow/python/platform:tf_logging", + "//tensorflow/python/profiler:profiler_client", + "//tensorflow/python/profiler:profiler_v2", +- "@pypi_portpicker//:pkg", + ], + ) +diff --git a/tensorflow/python/summary/BUILD b/tensorflow/python/summary/BUILD +index 126fb6d31f7..b292e39356f 100644 +--- a/tensorflow/python/summary/BUILD ++++ b/tensorflow/python/summary/BUILD +@@ -121,6 +121,5 @@ tf_py_strict_test( + "//tensorflow/python/ops:summary_ops_v2", + "//tensorflow/python/platform:client_testlib", + "//tensorflow/python/training:training_util", +- "@pypi_tensorboard//:pkg", + ], + ) +diff --git a/third_party/py/BUILD.tpl b/third_party/py/BUILD.tpl +index 7cc1e085684..45480bd4a31 100644 +--- a/third_party/py/BUILD.tpl ++++ b/third_party/py/BUILD.tpl +@@ -5,17 +5,16 @@ package(default_visibility = ["//visibility:public"]) + # Point both runtimes to the same python binary to ensure we always + # use the python binary specified by ./configure.py script. + load("@bazel_tools//tools/python:toolchain.bzl", "py_runtime_pair") +-load("@python//:defs.bzl", "interpreter") + + py_runtime( + name = "py2_runtime", +- interpreter_path = interpreter, ++ interpreter_path = "%{PYTHON_BIN_PATH}", + python_version = "PY2", + ) + + py_runtime( + name = "py3_runtime", +- interpreter_path = interpreter, ++ interpreter_path = "%{PYTHON_BIN_PATH}", + python_version = "PY3", + ) + +@@ -33,8 +32,27 @@ toolchain( + exec_compatible_with = [%{PLATFORM_CONSTRAINT}], + ) + +-alias(name = "python_headers", +- actual = "@python//:python_headers") ++# To build Python C/C++ extension on Windows, we need to link to python import library pythonXY.lib ++# See https://docs.python.org/3/extending/windows.html ++cc_import( ++ name = "python_lib", ++ interface_library = select({ ++ ":windows": ":python_import_lib", ++ # A placeholder for Unix platforms which makes --no_build happy. ++ "//conditions:default": "not-existing.lib", ++ }), ++ system_provided = 1, ++) ++ ++cc_library( ++ name = "python_headers", ++ hdrs = [":python_include"], ++ deps = select({ ++ ":windows": [":python_lib"], ++ "//conditions:default": [], ++ }), ++ includes = ["python_include"], ++) + + # This alias is exists for the use of targets in the @llvm-project dependency, + # which expect a python_headers target called @python_runtime//:headers. 
We use +@@ -45,9 +63,18 @@ alias( + actual = ":python_headers", + ) + ++cc_library( ++ name = "numpy_headers", ++ hdrs = [":numpy_include"], ++ includes = ["numpy_include"], ++) + + config_setting( + name = "windows", + values = {"cpu": "x64_windows"}, + visibility = ["//visibility:public"], +-) +\ No newline at end of file ++) ++ ++%{PYTHON_INCLUDE_GENRULE} ++%{NUMPY_INCLUDE_GENRULE} ++%{PYTHON_IMPORT_LIB_GENRULE} +\ No newline at end of file +diff --git a/third_party/py/non_hermetic/README b/third_party/py/README +similarity index 100% +rename from third_party/py/non_hermetic/README +rename to third_party/py/README +diff --git a/third_party/py/non_hermetic/BUILD b/third_party/py/non_hermetic/BUILD +deleted file mode 100644 +index e69de29bb2d..00000000000 +diff --git a/third_party/py/non_hermetic/BUILD.tpl b/third_party/py/non_hermetic/BUILD.tpl +deleted file mode 100644 +index 45480bd4a31..00000000000 +--- a/third_party/py/non_hermetic/BUILD.tpl ++++ /dev/null +@@ -1,80 +0,0 @@ +-licenses(["restricted"]) +- +-package(default_visibility = ["//visibility:public"]) +- +-# Point both runtimes to the same python binary to ensure we always +-# use the python binary specified by ./configure.py script. +-load("@bazel_tools//tools/python:toolchain.bzl", "py_runtime_pair") +- +-py_runtime( +- name = "py2_runtime", +- interpreter_path = "%{PYTHON_BIN_PATH}", +- python_version = "PY2", +-) +- +-py_runtime( +- name = "py3_runtime", +- interpreter_path = "%{PYTHON_BIN_PATH}", +- python_version = "PY3", +-) +- +-py_runtime_pair( +- name = "py_runtime_pair", +- py2_runtime = ":py2_runtime", +- py3_runtime = ":py3_runtime", +-) +- +-toolchain( +- name = "py_toolchain", +- toolchain = ":py_runtime_pair", +- toolchain_type = "@bazel_tools//tools/python:toolchain_type", +- target_compatible_with = [%{PLATFORM_CONSTRAINT}], +- exec_compatible_with = [%{PLATFORM_CONSTRAINT}], +-) +- +-# To build Python C/C++ extension on Windows, we need to link to python import library pythonXY.lib +-# See https://docs.python.org/3/extending/windows.html +-cc_import( +- name = "python_lib", +- interface_library = select({ +- ":windows": ":python_import_lib", +- # A placeholder for Unix platforms which makes --no_build happy. +- "//conditions:default": "not-existing.lib", +- }), +- system_provided = 1, +-) +- +-cc_library( +- name = "python_headers", +- hdrs = [":python_include"], +- deps = select({ +- ":windows": [":python_lib"], +- "//conditions:default": [], +- }), +- includes = ["python_include"], +-) +- +-# This alias is exists for the use of targets in the @llvm-project dependency, +-# which expect a python_headers target called @python_runtime//:headers. We use +-# a repo_mapping to alias python_runtime to this package, and an alias to create +-# the correct target. 
+-alias( +- name = "headers", +- actual = ":python_headers", +-) +- +-cc_library( +- name = "numpy_headers", +- hdrs = [":numpy_include"], +- includes = ["numpy_include"], +-) +- +-config_setting( +- name = "windows", +- values = {"cpu": "x64_windows"}, +- visibility = ["//visibility:public"], +-) +- +-%{PYTHON_INCLUDE_GENRULE} +-%{NUMPY_INCLUDE_GENRULE} +-%{PYTHON_IMPORT_LIB_GENRULE} +\ No newline at end of file +diff --git a/third_party/py/non_hermetic/ml_dtypes/BUILD b/third_party/py/non_hermetic/ml_dtypes/BUILD +deleted file mode 100644 +index e69de29bb2d..00000000000 +diff --git a/third_party/py/non_hermetic/ml_dtypes/LICENSE b/third_party/py/non_hermetic/ml_dtypes/LICENSE +deleted file mode 100644 +index d6456956733..00000000000 +--- a/third_party/py/non_hermetic/ml_dtypes/LICENSE ++++ /dev/null +@@ -1,202 +0,0 @@ +- +- Apache License +- Version 2.0, January 2004 +- http://www.apache.org/licenses/ +- +- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION +- +- 1. Definitions. +- +- "License" shall mean the terms and conditions for use, reproduction, +- and distribution as defined by Sections 1 through 9 of this document. +- +- "Licensor" shall mean the copyright owner or entity authorized by +- the copyright owner that is granting the License. +- +- "Legal Entity" shall mean the union of the acting entity and all +- other entities that control, are controlled by, or are under common +- control with that entity. For the purposes of this definition, +- "control" means (i) the power, direct or indirect, to cause the +- direction or management of such entity, whether by contract or +- otherwise, or (ii) ownership of fifty percent (50%) or more of the +- outstanding shares, or (iii) beneficial ownership of such entity. +- +- "You" (or "Your") shall mean an individual or Legal Entity +- exercising permissions granted by this License. +- +- "Source" form shall mean the preferred form for making modifications, +- including but not limited to software source code, documentation +- source, and configuration files. +- +- "Object" form shall mean any form resulting from mechanical +- transformation or translation of a Source form, including but +- not limited to compiled object code, generated documentation, +- and conversions to other media types. +- +- "Work" shall mean the work of authorship, whether in Source or +- Object form, made available under the License, as indicated by a +- copyright notice that is included in or attached to the work +- (an example is provided in the Appendix below). +- +- "Derivative Works" shall mean any work, whether in Source or Object +- form, that is based on (or derived from) the Work and for which the +- editorial revisions, annotations, elaborations, or other modifications +- represent, as a whole, an original work of authorship. For the purposes +- of this License, Derivative Works shall not include works that remain +- separable from, or merely link (or bind by name) to the interfaces of, +- the Work and Derivative Works thereof. +- +- "Contribution" shall mean any work of authorship, including +- the original version of the Work and any modifications or additions +- to that Work or Derivative Works thereof, that is intentionally +- submitted to Licensor for inclusion in the Work by the copyright owner +- or by an individual or Legal Entity authorized to submit on behalf of +- the copyright owner. 
For the purposes of this definition, "submitted" +- means any form of electronic, verbal, or written communication sent +- to the Licensor or its representatives, including but not limited to +- communication on electronic mailing lists, source code control systems, +- and issue tracking systems that are managed by, or on behalf of, the +- Licensor for the purpose of discussing and improving the Work, but +- excluding communication that is conspicuously marked or otherwise +- designated in writing by the copyright owner as "Not a Contribution." +- +- "Contributor" shall mean Licensor and any individual or Legal Entity +- on behalf of whom a Contribution has been received by Licensor and +- subsequently incorporated within the Work. +- +- 2. Grant of Copyright License. Subject to the terms and conditions of +- this License, each Contributor hereby grants to You a perpetual, +- worldwide, non-exclusive, no-charge, royalty-free, irrevocable +- copyright license to reproduce, prepare Derivative Works of, +- publicly display, publicly perform, sublicense, and distribute the +- Work and such Derivative Works in Source or Object form. +- +- 3. Grant of Patent License. Subject to the terms and conditions of +- this License, each Contributor hereby grants to You a perpetual, +- worldwide, non-exclusive, no-charge, royalty-free, irrevocable +- (except as stated in this section) patent license to make, have made, +- use, offer to sell, sell, import, and otherwise transfer the Work, +- where such license applies only to those patent claims licensable +- by such Contributor that are necessarily infringed by their +- Contribution(s) alone or by combination of their Contribution(s) +- with the Work to which such Contribution(s) was submitted. If You +- institute patent litigation against any entity (including a +- cross-claim or counterclaim in a lawsuit) alleging that the Work +- or a Contribution incorporated within the Work constitutes direct +- or contributory patent infringement, then any patent licenses +- granted to You under this License for that Work shall terminate +- as of the date such litigation is filed. +- +- 4. Redistribution. You may reproduce and distribute copies of the +- Work or Derivative Works thereof in any medium, with or without +- modifications, and in Source or Object form, provided that You +- meet the following conditions: +- +- (a) You must give any other recipients of the Work or +- Derivative Works a copy of this License; and +- +- (b) You must cause any modified files to carry prominent notices +- stating that You changed the files; and +- +- (c) You must retain, in the Source form of any Derivative Works +- that You distribute, all copyright, patent, trademark, and +- attribution notices from the Source form of the Work, +- excluding those notices that do not pertain to any part of +- the Derivative Works; and +- +- (d) If the Work includes a "NOTICE" text file as part of its +- distribution, then any Derivative Works that You distribute must +- include a readable copy of the attribution notices contained +- within such NOTICE file, excluding those notices that do not +- pertain to any part of the Derivative Works, in at least one +- of the following places: within a NOTICE text file distributed +- as part of the Derivative Works; within the Source form or +- documentation, if provided along with the Derivative Works; or, +- within a display generated by the Derivative Works, if and +- wherever such third-party notices normally appear. 
The contents +- of the NOTICE file are for informational purposes only and +- do not modify the License. You may add Your own attribution +- notices within Derivative Works that You distribute, alongside +- or as an addendum to the NOTICE text from the Work, provided +- that such additional attribution notices cannot be construed +- as modifying the License. +- +- You may add Your own copyright statement to Your modifications and +- may provide additional or different license terms and conditions +- for use, reproduction, or distribution of Your modifications, or +- for any such Derivative Works as a whole, provided Your use, +- reproduction, and distribution of the Work otherwise complies with +- the conditions stated in this License. +- +- 5. Submission of Contributions. Unless You explicitly state otherwise, +- any Contribution intentionally submitted for inclusion in the Work +- by You to the Licensor shall be under the terms and conditions of +- this License, without any additional terms or conditions. +- Notwithstanding the above, nothing herein shall supersede or modify +- the terms of any separate license agreement you may have executed +- with Licensor regarding such Contributions. +- +- 6. Trademarks. This License does not grant permission to use the trade +- names, trademarks, service marks, or product names of the Licensor, +- except as required for reasonable and customary use in describing the +- origin of the Work and reproducing the content of the NOTICE file. +- +- 7. Disclaimer of Warranty. Unless required by applicable law or +- agreed to in writing, Licensor provides the Work (and each +- Contributor provides its Contributions) on an "AS IS" BASIS, +- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +- implied, including, without limitation, any warranties or conditions +- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A +- PARTICULAR PURPOSE. You are solely responsible for determining the +- appropriateness of using or redistributing the Work and assume any +- risks associated with Your exercise of permissions under this License. +- +- 8. Limitation of Liability. In no event and under no legal theory, +- whether in tort (including negligence), contract, or otherwise, +- unless required by applicable law (such as deliberate and grossly +- negligent acts) or agreed to in writing, shall any Contributor be +- liable to You for damages, including any direct, indirect, special, +- incidental, or consequential damages of any character arising as a +- result of this License or out of the use or inability to use the +- Work (including but not limited to damages for loss of goodwill, +- work stoppage, computer failure or malfunction, or any and all +- other commercial damages or losses), even if such Contributor +- has been advised of the possibility of such damages. +- +- 9. Accepting Warranty or Additional Liability. While redistributing +- the Work or Derivative Works thereof, You may choose to offer, +- and charge a fee for, acceptance of support, warranty, indemnity, +- or other liability obligations and/or rights consistent with this +- License. However, in accepting such obligations, You may act only +- on Your own behalf and on Your sole responsibility, not on behalf +- of any other Contributor, and only if You agree to indemnify, +- defend, and hold each Contributor harmless for any liability +- incurred by, or claims asserted against, such Contributor by reason +- of your accepting any such warranty or additional liability. 
+- +- END OF TERMS AND CONDITIONS +- +- APPENDIX: How to apply the Apache License to your work. +- +- To apply the Apache License to your work, attach the following +- boilerplate notice, with the fields enclosed by brackets "[]" +- replaced with your own identifying information. (Don't include +- the brackets!) The text should be enclosed in the appropriate +- comment syntax for the file format. We also recommend that a +- file or class name and description of purpose be included on the +- same "printed page" as the copyright notice for easier +- identification within third-party archives. +- +- Copyright [yyyy] [name of copyright owner] +- +- Licensed under the Apache License, Version 2.0 (the "License"); +- you may not use this file except in compliance with the License. +- You may obtain a copy of the License at +- +- http://www.apache.org/licenses/LICENSE-2.0 +- +- Unless required by applicable law or agreed to in writing, software +- distributed under the License is distributed on an "AS IS" BASIS, +- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +- See the License for the specific language governing permissions and +- limitations under the License. +diff --git a/third_party/py/non_hermetic/ml_dtypes/ml_dtypes.BUILD b/third_party/py/non_hermetic/ml_dtypes/ml_dtypes.BUILD +deleted file mode 100644 +index 95f58d3c476..00000000000 +--- a/third_party/py/non_hermetic/ml_dtypes/ml_dtypes.BUILD ++++ /dev/null +@@ -1,50 +0,0 @@ +-load("@pybind11_bazel//:build_defs.bzl", "pybind_extension") +- +-package( +- default_visibility = ["//visibility:public"], +- licenses = ["notice"], +-) +- +-exports_files(["LICENSE"]) +- +-cc_library( +- name = "float8", +- hdrs = ["include/float8.h"], +- # Internal headers are all relative to , but other packages +- # include these headers with the prefix. 
+- includes = [ +- ".", +- "ml_dtypes", +- ], +- deps = ["@org_tensorflow//third_party/eigen3"], +-) +- +-pybind_extension( +- name = "_custom_floats", +- srcs = [ +- "_src/common.h", +- "_src/custom_float.h", +- "_src/dtypes.cc", +- "_src/int4.h", +- "_src/numpy.cc", +- "_src/numpy.h", +- "_src/ufuncs.h", +- ], +- includes = ["ml_dtypes"], +- visibility = [":__subpackages__"], +- deps = [ +- ":float8", +- "@org_tensorflow//third_party/eigen3", +- "@org_tensorflow//third_party/py/numpy:headers", +- ], +-) +- +-py_library( +- name = "ml_dtypes", +- srcs = [ +- "__init__.py", +- "_finfo.py", +- "_iinfo.py", +- ], +- deps = [":_custom_floats"], +-) +diff --git a/third_party/py/non_hermetic/ml_dtypes/ml_dtypes.tests.BUILD b/third_party/py/non_hermetic/ml_dtypes/ml_dtypes.tests.BUILD +deleted file mode 100644 +index fde5f2eaccf..00000000000 +--- a/third_party/py/non_hermetic/ml_dtypes/ml_dtypes.tests.BUILD ++++ /dev/null +@@ -1,60 +0,0 @@ +-package( +- default_visibility = ["//visibility:public"], +-) +- +-py_library( +- name = "testing_base", +- deps = [ +- "//:ml_dtypes", +- "@absl_py//absl/testing:absltest", +- "@absl_py//absl/testing:parameterized", +- "@org_tensorflow//third_party/py/numpy", +- ], +-) +- +-py_test( +- name = "custom_float_test", +- srcs = ["custom_float_test.py"], +- main = "custom_float_test.py", +- deps = [":testing_base"], +-) +- +-py_test( +- name = "int4_test", +- srcs = ["int4_test.py"], +- main = "int4_test.py", +- deps = [":testing_base"], +-) +- +-py_test( +- name = "iinfo_test", +- srcs = ["iinfo_test.py"], +- main = "iinfo_test.py", +- deps = [":testing_base"], +-) +- +-py_test( +- name = "finfo_test", +- srcs = ["finfo_test.py"], +- main = "finfo_test.py", +- deps = [":testing_base"], +-) +- +-py_test( +- name = "metadata_test", +- srcs = ["metadata_test.py"], +- main = "metadata_test.py", +- deps = [":testing_base"], +-) +- +-cc_test( +- name = "float8_test", +- srcs = ["float8_test.cc"], +- linkstatic = 1, +- deps = [ +- "//:float8", +- "@com_google_absl//absl/strings", +- "@com_google_googletest//:gtest_main", +- "@org_tensorflow//third_party/eigen3", +- ], +-) +diff --git a/third_party/py/non_hermetic/ml_dtypes/workspace.bzl b/third_party/py/non_hermetic/ml_dtypes/workspace.bzl +deleted file mode 100644 +index 2c34f494c34..00000000000 +--- a/third_party/py/non_hermetic/ml_dtypes/workspace.bzl ++++ /dev/null +@@ -1,22 +0,0 @@ +-"""Provides the repo macro to import ml_dtypes. +- +-ml_dtypes provides machine-learning-specific data-types like bfloat16, +-float8 varieties, and int4. 
+-""" +- +-load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls") +- +-def repo(): +- ML_DTYPES_COMMIT = "5b9fc9ad978757654843f4a8d899715dbea30e88" +- ML_DTYPES_SHA256 = "9662811d9ab3823a56f8fa91b5a67fd82062b6dd4f187169b41e82a44e526455" +- tf_http_archive( +- name = "ml_dtypes", +- build_file = "//third_party/py/ml_dtypes:ml_dtypes.BUILD", +- link_files = { +- "//third_party/py/ml_dtypes:ml_dtypes.tests.BUILD": "tests/BUILD.bazel", +- "//third_party/py/ml_dtypes:LICENSE": "LICENSE", +- }, +- sha256 = ML_DTYPES_SHA256, +- strip_prefix = "ml_dtypes-{commit}/ml_dtypes".format(commit = ML_DTYPES_COMMIT), +- urls = tf_mirror_urls("https://github.com/jax-ml/ml_dtypes/archive/{commit}/ml_dtypes-{commit}.tar.gz".format(commit = ML_DTYPES_COMMIT)), +- ) +diff --git a/third_party/py/non_hermetic/numpy/BUILD b/third_party/py/non_hermetic/numpy/BUILD +deleted file mode 100644 +index c80cc5287bc..00000000000 +--- a/third_party/py/non_hermetic/numpy/BUILD ++++ /dev/null +@@ -1,21 +0,0 @@ +-licenses(["restricted"]) +- +-package(default_visibility = ["//visibility:public"]) +- +-py_library( +- name = "numpy", +- srcs = ["tf_numpy_dummy.py"], +- srcs_version = "PY3", +-) +- +-alias( +- name = "headers", +- actual = "@local_config_python//:numpy_headers", +-) +- +-genrule( +- name = "dummy", +- outs = ["tf_numpy_dummy.py"], +- cmd = "touch $@", +- visibility = ["//visibility:private"], +-) +diff --git a/third_party/py/non_hermetic/numpy/README.md b/third_party/py/non_hermetic/numpy/README.md +deleted file mode 100644 +index 4e58b9df87b..00000000000 +--- a/third_party/py/non_hermetic/numpy/README.md ++++ /dev/null +@@ -1,4 +0,0 @@ +-# numpy_ops +- +-The folder tf_numpy_api/ contains lists of NumPy API symbols that the +-`numpy_ops` internal module in TensorFlow implements. +diff --git a/third_party/py/non_hermetic/numpy/tf_numpy_api/BUILD b/third_party/py/non_hermetic/numpy/tf_numpy_api/BUILD +deleted file mode 100644 +index 070f8ab8a65..00000000000 +--- a/third_party/py/non_hermetic/numpy/tf_numpy_api/BUILD ++++ /dev/null +@@ -1,12 +0,0 @@ +-# TensorFlow API backwards compatibility test goldens for tf.experimental.numpy. 
+- +-package( +- # copybara:uncomment default_applicable_licenses = ["//tensorflow:license"], +- default_visibility = ["//visibility:public"], +- licenses = ["notice"], +-) +- +-filegroup( +- name = "api_golden", +- srcs = glob(["*.pbtxt"]), +-) +diff --git a/third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.ndarray.pbtxt b/third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.ndarray.pbtxt +deleted file mode 100644 +index 9198264c029..00000000000 +--- a/third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.ndarray.pbtxt ++++ /dev/null +@@ -1,51 +0,0 @@ +-path: "tensorflow.experimental.numpy.ndarray" +-tf_class { +- is_instance: "<class \'tensorflow.python.framework.tensor.Tensor\'>" +- is_instance: "<class \'tensorflow.python.types.internal.NativeObject\'>" +- is_instance: "<class \'tensorflow.python.types.core.Symbol\'>" +- is_instance: "<class \'tensorflow.python.types.core.Tensor\'>" +- is_instance: "<type \'object\'>" +- member { +- name: "OVERLOADABLE_OPERATORS" +- mtype: "<type \'set\'>" +- } +- member { +- name: "dtype" +- mtype: "<type \'property\'>" +- } +- member { +- name: "name" +- mtype: "<type \'property\'>" +- } +- member { +- name: "ndim" +- mtype: "<type \'property\'>" +- } +- member { +- name: "shape" +- mtype: "<type \'property\'>" +- } +- member_method { +- name: "__init__" +- } +- member_method { +- name: "eval" +- argspec: "args=[\'self\', \'feed_dict\', \'session\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], " +- } +- member_method { +- name: "experimental_ref" +- argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "get_shape" +- argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "ref" +- argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "set_shape" +- argspec: "args=[\'self\', \'shape\'], varargs=None, keywords=None, defaults=None" +- } +-} +diff --git a/third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.pbtxt b/third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.pbtxt +deleted file mode 100644 +index 2f5490ad0c9..00000000000 +--- a/third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.pbtxt ++++ /dev/null +@@ -1,919 +0,0 @@ +-path: "tensorflow.experimental.numpy" +-tf_module { +- member { +- name: "bool_" +- mtype: "<type \'type\'>" +- } +- member { +- name: "complex128" +- mtype: "<type \'type\'>" +- } +- member { +- name: "complex64" +- mtype: "<type \'type\'>" +- } +- member { +- name: "complex_" +- mtype: "<type \'type\'>" +- } +- member { +- name: "e" +- mtype: "<class \'float\'>" +- } +- member { +- name: "float16" +- mtype: "<type \'type\'>" +- } +- member { +- name: "float32" +- mtype: "<type \'type\'>" +- } +- member { +- name: "float64" +- mtype: "<type \'type\'>" +- } +- member { +- name: "float_" +- mtype: "<type \'type\'>" +- } +- member { +- name: "iinfo" +- mtype: "<type \'type\'>" +- } +- member { +- name: "inexact" +- mtype: "<type \'type\'>" +- } +- member { +- name: "inf" +- mtype: "<class \'float\'>" +- } +- member { +- name: "int16" +- mtype: "<type \'type\'>" +- } +- member { +- name: "int32" +- mtype: "<type \'type\'>" +- } +- member { +- name: "int64" +- mtype: "<type \'type\'>" +- } +- member { +- name: "int8" +- mtype: "<type \'type\'>" +- } +- member { +- name: "int_" +- mtype: "<type \'type\'>" +- } +- member { +- 
name: "ndarray" +- mtype: "<type \'type\'>" +- } +- member { +- name: "newaxis" +- mtype: "<type \'NoneType\'>" +- } +- member { +- name: "object_" +- mtype: "<type \'type\'>" +- } +- member { +- name: "pi" +- mtype: "<class \'float\'>" +- } +- member { +- name: "random" +- mtype: "<type \'module\'>" +- } +- member { +- name: "string_" +- mtype: "<type \'type\'>" +- } +- member { +- name: "uint16" +- mtype: "<type \'type\'>" +- } +- member { +- name: "uint32" +- mtype: "<type \'type\'>" +- } +- member { +- name: "uint64" +- mtype: "<type \'type\'>" +- } +- member { +- name: "uint8" +- mtype: "<type \'type\'>" +- } +- member { +- name: "unicode_" +- mtype: "<type \'type\'>" +- } +- member_method { +- name: "abs" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "absolute" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "add" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "all" +- argspec: "args=[\'a\', \'axis\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], " +- } +- member_method { +- name: "allclose" +- argspec: "args=[\'a\', \'b\', \'rtol\', \'atol\', \'equal_nan\'], varargs=None, keywords=None, defaults=[\'1e-05\', \'1e-08\', \'False\'], " +- } +- member_method { +- name: "amax" +- argspec: "args=[\'a\', \'axis\', \'out\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], " +- } +- member_method { +- name: "amin" +- argspec: "args=[\'a\', \'axis\', \'out\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], " +- } +- member_method { +- name: "angle" +- argspec: "args=[\'z\', \'deg\'], varargs=None, keywords=None, defaults=[\'False\'], " +- } +- member_method { +- name: "any" +- argspec: "args=[\'a\', \'axis\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], " +- } +- member_method { +- name: "append" +- argspec: "args=[\'arr\', \'values\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], " +- } +- member_method { +- name: "arange" +- argspec: "args=[\'start\', \'stop\', \'step\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'1\', \'None\'], " +- } +- member_method { +- name: "arccos" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "arccosh" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "arcsin" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "arcsinh" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "arctan" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "arctan2" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "arctanh" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "argmax" +- argspec: "args=[\'a\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], " +- } +- member_method { +- name: "argmin" +- argspec: "args=[\'a\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], " +- } +- member_method { +- name: "argsort" +- argspec: "args=[\'a\', \'axis\', \'kind\', \'order\'], varargs=None, keywords=None, defaults=[\'-1\', \'quicksort\', \'None\'], " +- 
} +- member_method { +- name: "around" +- argspec: "args=[\'a\', \'decimals\'], varargs=None, keywords=None, defaults=[\'0\'], " +- } +- member_method { +- name: "array" +- argspec: "args=[\'val\', \'dtype\', \'copy\', \'ndmin\'], varargs=None, keywords=None, defaults=[\'None\', \'True\', \'0\'], " +- } +- member_method { +- name: "array_equal" +- argspec: "args=[\'a1\', \'a2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "asanyarray" +- argspec: "args=[\'a\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\'], " +- } +- member_method { +- name: "asarray" +- argspec: "args=[\'a\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\'], " +- } +- member_method { +- name: "ascontiguousarray" +- argspec: "args=[\'a\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\'], " +- } +- member_method { +- name: "atleast_1d" +- argspec: "args=[], varargs=arys, keywords=None, defaults=None" +- } +- member_method { +- name: "atleast_2d" +- argspec: "args=[], varargs=arys, keywords=None, defaults=None" +- } +- member_method { +- name: "atleast_3d" +- argspec: "args=[], varargs=arys, keywords=None, defaults=None" +- } +- member_method { +- name: "average" +- argspec: "args=[\'a\', \'axis\', \'weights\', \'returned\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'False\'], " +- } +- member_method { +- name: "bitwise_and" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "bitwise_not" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "bitwise_or" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "bitwise_xor" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "broadcast_arrays" +- argspec: "args=[], varargs=args, keywords=kwargs, defaults=None" +- } +- member_method { +- name: "broadcast_to" +- argspec: "args=[\'array\', \'shape\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "cbrt" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "ceil" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "clip" +- argspec: "args=[\'a\', \'a_min\', \'a_max\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "compress" +- argspec: "args=[\'condition\', \'a\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], " +- } +- member_method { +- name: "concatenate" +- argspec: "args=[\'arys\', \'axis\'], varargs=None, keywords=None, defaults=[\'0\'], " +- } +- member_method { +- name: "conj" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "conjugate" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "copy" +- argspec: "args=[\'a\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "cos" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "cosh" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "count_nonzero" +- argspec: "args=[\'a\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], " +- } +- member_method { +- name: "cross" +- argspec: "args=[\'a\', \'b\', \'axisa\', \'axisb\', \'axisc\', 
\'axis\'], varargs=None, keywords=None, defaults=[\'-1\', \'-1\', \'-1\', \'None\'], " +- } +- member_method { +- name: "cumprod" +- argspec: "args=[\'a\', \'axis\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], " +- } +- member_method { +- name: "cumsum" +- argspec: "args=[\'a\', \'axis\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], " +- } +- member_method { +- name: "deg2rad" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "diag" +- argspec: "args=[\'v\', \'k\'], varargs=None, keywords=None, defaults=[\'0\'], " +- } +- member_method { +- name: "diag_indices" +- argspec: "args=[\'n\', \'ndim\'], varargs=None, keywords=None, defaults=[\'2\'], " +- } +- member_method { +- name: "diagflat" +- argspec: "args=[\'v\', \'k\'], varargs=None, keywords=None, defaults=[\'0\'], " +- } +- member_method { +- name: "diagonal" +- argspec: "args=[\'a\', \'offset\', \'axis1\', \'axis2\'], varargs=None, keywords=None, defaults=[\'0\', \'0\', \'1\'], " +- } +- member_method { +- name: "diff" +- argspec: "args=[\'a\', \'n\', \'axis\'], varargs=None, keywords=None, defaults=[\'1\', \'-1\'], " +- } +- member_method { +- name: "divide" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "divmod" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "dot" +- argspec: "args=[\'a\', \'b\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "dsplit" +- argspec: "args=[\'ary\', \'indices_or_sections\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "dstack" +- argspec: "args=[\'tup\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "einsum" +- argspec: "args=[\'subscripts\'], varargs=operands, keywords=kwargs, defaults=None" +- } +- member_method { +- name: "empty" +- argspec: "args=[\'shape\', \'dtype\'], varargs=None, keywords=None, defaults=[\"<class \'float\'>\"], " +- } +- member_method { +- name: "empty_like" +- argspec: "args=[\'a\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\'], " +- } +- member_method { +- name: "equal" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "exp" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "exp2" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "expand_dims" +- argspec: "args=[\'a\', \'axis\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "experimental_enable_numpy_behavior" +- argspec: "args=[\'prefer_float32\'], varargs=None, keywords=None, defaults=[\'False\'], " +- } +- member_method { +- name: "expm1" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "eye" +- argspec: "args=[\'N\', \'M\', \'k\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'0\', \"<class \'float\'>\"], " +- } +- member_method { +- name: "fabs" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "finfo" +- argspec: "args=[\'dtype\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "fix" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "flatten" +- argspec: "args=[\'a\', 
\'order\'], varargs=None, keywords=None, defaults=[\'C\'], " +- } +- member_method { +- name: "flip" +- argspec: "args=[\'m\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], " +- } +- member_method { +- name: "fliplr" +- argspec: "args=[\'m\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "flipud" +- argspec: "args=[\'m\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "float_power" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "floor" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "floor_divide" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "full" +- argspec: "args=[\'shape\', \'fill_value\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\'], " +- } +- member_method { +- name: "full_like" +- argspec: "args=[\'a\', \'fill_value\', \'dtype\', \'order\', \'subok\', \'shape\'], varargs=None, keywords=None, defaults=[\'None\', \'K\', \'True\', \'None\'], " +- } +- member_method { +- name: "gcd" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "geomspace" +- argspec: "args=[\'start\', \'stop\', \'num\', \'endpoint\', \'dtype\', \'axis\'], varargs=None, keywords=None, defaults=[\'50\', \'True\', \'None\', \'0\'], " +- } +- member_method { +- name: "greater" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "greater_equal" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "heaviside" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "hsplit" +- argspec: "args=[\'ary\', \'indices_or_sections\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "hstack" +- argspec: "args=[\'tup\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "hypot" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "identity" +- argspec: "args=[\'n\', \'dtype\'], varargs=None, keywords=None, defaults=[\"<class \'float\'>\"], " +- } +- member_method { +- name: "imag" +- argspec: "args=[\'val\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "inner" +- argspec: "args=[\'a\', \'b\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "isclose" +- argspec: "args=[\'a\', \'b\', \'rtol\', \'atol\', \'equal_nan\'], varargs=None, keywords=None, defaults=[\'1e-05\', \'1e-08\', \'False\'], " +- } +- member_method { +- name: "iscomplex" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "iscomplexobj" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "isfinite" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "isinf" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "isnan" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "isneginf" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "isposinf" +- argspec: "args=[\'x\'], 
varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "isreal" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "isrealobj" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "isscalar" +- argspec: "args=[\'num\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "issubdtype" +- argspec: "args=[\'arg1\', \'arg2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "ix_" +- argspec: "args=[], varargs=args, keywords=None, defaults=None" +- } +- member_method { +- name: "kron" +- argspec: "args=[\'a\', \'b\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "lcm" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "less" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "less_equal" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "linspace" +- argspec: "args=[\'start\', \'stop\', \'num\', \'endpoint\', \'retstep\', \'dtype\', \'axis\'], varargs=None, keywords=None, defaults=[\'50\', \'True\', \'False\', \"<class \'float\'>\", \'0\'], " +- } +- member_method { +- name: "log" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "log10" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "log1p" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "log2" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "logaddexp" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "logaddexp2" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "logical_and" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "logical_not" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "logical_or" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "logical_xor" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "logspace" +- argspec: "args=[\'start\', \'stop\', \'num\', \'endpoint\', \'base\', \'dtype\', \'axis\'], varargs=None, keywords=None, defaults=[\'50\', \'True\', \'10.0\', \'None\', \'0\'], " +- } +- member_method { +- name: "matmul" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "max" +- argspec: "args=[\'a\', \'axis\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], " +- } +- member_method { +- name: "maximum" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "mean" +- argspec: "args=[\'a\', \'axis\', \'dtype\', \'out\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\', \'None\'], " +- } +- member_method { +- name: "meshgrid" +- argspec: "args=[], varargs=xi, keywords=kwargs, defaults=None" +- } +- member_method { +- name: "min" +- argspec: "args=[\'a\', \'axis\', 
\'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], " +- } +- member_method { +- name: "minimum" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "mod" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "moveaxis" +- argspec: "args=[\'a\', \'source\', \'destination\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "multiply" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "nanmean" +- argspec: "args=[\'a\', \'axis\', \'dtype\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], " +- } +- member_method { +- name: "nanprod" +- argspec: "args=[\'a\', \'axis\', \'dtype\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'False\'], " +- } +- member_method { +- name: "nansum" +- argspec: "args=[\'a\', \'axis\', \'dtype\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'False\'], " +- } +- member_method { +- name: "ndim" +- argspec: "args=[\'a\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "negative" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "nextafter" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "nonzero" +- argspec: "args=[\'a\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "not_equal" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "ones" +- argspec: "args=[\'shape\', \'dtype\'], varargs=None, keywords=None, defaults=[\"<class \'float\'>\"], " +- } +- member_method { +- name: "ones_like" +- argspec: "args=[\'a\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\'], " +- } +- member_method { +- name: "outer" +- argspec: "args=[\'a\', \'b\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "pad" +- argspec: "args=[\'array\', \'pad_width\', \'mode\'], varargs=None, keywords=kwargs, defaults=None" +- } +- member_method { +- name: "polyval" +- argspec: "args=[\'p\', \'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "positive" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "power" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "prod" +- argspec: "args=[\'a\', \'axis\', \'dtype\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], " +- } +- member_method { +- name: "promote_types" +- argspec: "args=[\'type1\', \'type2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "ptp" +- argspec: "args=[\'a\', \'axis\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], " +- } +- member_method { +- name: "rad2deg" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "ravel" +- argspec: "args=[\'a\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "real" +- argspec: "args=[\'val\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "reciprocal" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- 
name: "remainder" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "repeat" +- argspec: "args=[\'a\', \'repeats\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], " +- } +- member_method { +- name: "reshape" +- argspec: "args=[\'a\', \'newshape\', \'order\'], varargs=None, keywords=None, defaults=[\'C\'], " +- } +- member_method { +- name: "result_type" +- argspec: "args=[], varargs=arrays_and_dtypes, keywords=None, defaults=None" +- } +- member_method { +- name: "roll" +- argspec: "args=[\'a\', \'shift\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], " +- } +- member_method { +- name: "rot90" +- argspec: "args=[\'m\', \'k\', \'axes\'], varargs=None, keywords=None, defaults=[\'1\', \'(0, 1)\'], " +- } +- member_method { +- name: "round" +- argspec: "args=[\'a\', \'decimals\'], varargs=None, keywords=None, defaults=[\'0\'], " +- } +- member_method { +- name: "select" +- argspec: "args=[\'condlist\', \'choicelist\', \'default\'], varargs=None, keywords=None, defaults=[\'0\'], " +- } +- member_method { +- name: "shape" +- argspec: "args=[\'a\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "sign" +- argspec: "args=[\'x\', \'out\', \'where\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\'], " +- } +- member_method { +- name: "signbit" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "sin" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "sinc" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "sinh" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "size" +- argspec: "args=[\'x\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], " +- } +- member_method { +- name: "sort" +- argspec: "args=[\'a\', \'axis\', \'kind\', \'order\'], varargs=None, keywords=None, defaults=[\'-1\', \'quicksort\', \'None\'], " +- } +- member_method { +- name: "split" +- argspec: "args=[\'ary\', \'indices_or_sections\', \'axis\'], varargs=None, keywords=None, defaults=[\'0\'], " +- } +- member_method { +- name: "sqrt" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "square" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "squeeze" +- argspec: "args=[\'a\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], " +- } +- member_method { +- name: "stack" +- argspec: "args=[\'arrays\', \'axis\'], varargs=None, keywords=None, defaults=[\'0\'], " +- } +- member_method { +- name: "std" +- argspec: "args=[\'a\', \'axis\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], " +- } +- member_method { +- name: "subtract" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "sum" +- argspec: "args=[\'a\', \'axis\', \'dtype\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], " +- } +- member_method { +- name: "swapaxes" +- argspec: "args=[\'a\', \'axis1\', \'axis2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "take" +- argspec: "args=[\'a\', \'indices\', \'axis\', \'out\', \'mode\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'clip\'], " +- } +- member_method { +- name: "take_along_axis" 
+- argspec: "args=[\'arr\', \'indices\', \'axis\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "tan" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "tanh" +- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "tensordot" +- argspec: "args=[\'a\', \'b\', \'axes\'], varargs=None, keywords=None, defaults=[\'2\'], " +- } +- member_method { +- name: "tile" +- argspec: "args=[\'a\', \'reps\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "trace" +- argspec: "args=[\'a\', \'offset\', \'axis1\', \'axis2\', \'dtype\'], varargs=None, keywords=None, defaults=[\'0\', \'0\', \'1\', \'None\'], " +- } +- member_method { +- name: "transpose" +- argspec: "args=[\'a\', \'axes\'], varargs=None, keywords=None, defaults=[\'None\'], " +- } +- member_method { +- name: "tri" +- argspec: "args=[\'N\', \'M\', \'k\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'0\', \'None\'], " +- } +- member_method { +- name: "tril" +- argspec: "args=[\'m\', \'k\'], varargs=None, keywords=None, defaults=[\'0\'], " +- } +- member_method { +- name: "triu" +- argspec: "args=[\'m\', \'k\'], varargs=None, keywords=None, defaults=[\'0\'], " +- } +- member_method { +- name: "true_divide" +- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "vander" +- argspec: "args=[\'x\', \'N\', \'increasing\'], varargs=None, keywords=None, defaults=[\'None\', \'False\'], " +- } +- member_method { +- name: "var" +- argspec: "args=[\'a\', \'axis\', \'dtype\', \'out\', \'ddof\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\', \'0\', \'None\'], " +- } +- member_method { +- name: "vdot" +- argspec: "args=[\'a\', \'b\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "vsplit" +- argspec: "args=[\'ary\', \'indices_or_sections\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "vstack" +- argspec: "args=[\'tup\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "where" +- argspec: "args=[\'condition\', \'x\', \'y\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], " +- } +- member_method { +- name: "zeros" +- argspec: "args=[\'shape\', \'dtype\'], varargs=None, keywords=None, defaults=[\"<class \'float\'>\"], " +- } +- member_method { +- name: "zeros_like" +- argspec: "args=[\'a\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\'], " +- } +-} +diff --git a/third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.random.pbtxt b/third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.random.pbtxt +deleted file mode 100644 +index 61a4766f3f8..00000000000 +--- a/third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.random.pbtxt ++++ /dev/null +@@ -1,35 +0,0 @@ +-path: "tensorflow.experimental.numpy.random" +-tf_module { +- member_method { +- name: "poisson" +- argspec: "args=[\'lam\', \'size\'], varargs=None, keywords=None, defaults=[\'1.0\', \'None\'], " +- } +- member_method { +- name: "rand" +- argspec: "args=[], varargs=size, keywords=None, defaults=None" +- } +- member_method { +- name: "randint" +- argspec: "args=[\'low\', \'high\', \'size\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \"<class \'numpy.int64\'>\"], " +- } +- member_method { +- name: "randn" +- argspec: 
"args=[], varargs=args, keywords=None, defaults=None" +- } +- member_method { +- name: "random" +- argspec: "args=[\'size\'], varargs=None, keywords=None, defaults=[\'None\'], " +- } +- member_method { +- name: "seed" +- argspec: "args=[\'s\'], varargs=None, keywords=None, defaults=None" +- } +- member_method { +- name: "standard_normal" +- argspec: "args=[\'size\'], varargs=None, keywords=None, defaults=[\'None\'], " +- } +- member_method { +- name: "uniform" +- argspec: "args=[\'low\', \'high\', \'size\'], varargs=None, keywords=None, defaults=[\'0.0\', \'1.0\', \'None\'], " +- } +-} +diff --git a/third_party/py/non_hermetic/python_configure.bzl b/third_party/py/non_hermetic/python_configure.bzl +deleted file mode 100644 +index 300cbfb6c71..00000000000 +--- a/third_party/py/non_hermetic/python_configure.bzl ++++ /dev/null +@@ -1,315 +0,0 @@ +-"""Repository rule for Python autoconfiguration. +- +-`python_configure` depends on the following environment variables: +- +- * `PYTHON_BIN_PATH`: location of python binary. +- * `PYTHON_LIB_PATH`: Location of python libraries. +-""" +- +-load( +- "//third_party/remote_config:common.bzl", +- "BAZEL_SH", +- "PYTHON_BIN_PATH", +- "PYTHON_LIB_PATH", +- "TF_PYTHON_CONFIG_REPO", +- "auto_config_fail", +- "config_repo_label", +- "execute", +- "get_bash_bin", +- "get_host_environ", +- "get_python_bin", +- "is_windows", +- "raw_exec", +- "read_dir", +-) +- +-def _genrule(src_dir, genrule_name, command, outs): +- """Returns a string with a genrule. +- +- Genrule executes the given command and produces the given outputs. +- """ +- return ( +- "genrule(\n" + +- ' name = "' + +- genrule_name + '",\n' + +- " outs = [\n" + +- outs + +- "\n ],\n" + +- ' cmd = """\n' + +- command + +- '\n """,\n' + +- ")\n" +- ) +- +-def _norm_path(path): +- """Returns a path with '/' and remove the trailing slash.""" +- path = path.replace("\\", "/") +- if path[-1] == "/": +- path = path[:-1] +- return path +- +-def _symlink_genrule_for_dir( +- repository_ctx, +- src_dir, +- dest_dir, +- genrule_name, +- src_files = [], +- dest_files = []): +- """Returns a genrule to symlink(or copy if on Windows) a set of files. +- +- If src_dir is passed, files will be read from the given directory; otherwise +- we assume files are in src_files and dest_files +- """ +- if src_dir != None: +- src_dir = _norm_path(src_dir) +- dest_dir = _norm_path(dest_dir) +- files = "\n".join(read_dir(repository_ctx, src_dir)) +- +- # Create a list with the src_dir stripped to use for outputs. +- dest_files = files.replace(src_dir, "").splitlines() +- src_files = files.splitlines() +- command = [] +- outs = [] +- for i in range(len(dest_files)): +- if dest_files[i] != "": +- # If we have only one file to link we do not want to use the dest_dir, as +- # $(@D) will include the full path to the file. +- dest = "$(@D)/" + dest_dir + dest_files[i] if len(dest_files) != 1 else "$(@D)/" + dest_files[i] +- +- # Copy the headers to create a sandboxable setup. +- cmd = "cp -f" +- command.append(cmd + ' "%s" "%s"' % (src_files[i], dest)) +- outs.append(' "' + dest_dir + dest_files[i] + '",') +- genrule = _genrule( +- src_dir, +- genrule_name, +- " && ".join(command), +- "\n".join(outs), +- ) +- return genrule +- +-def _get_python_lib(repository_ctx, python_bin): +- """Gets the python lib path.""" +- python_lib = get_host_environ(repository_ctx, PYTHON_LIB_PATH) +- if python_lib != None: +- return python_lib +- +- # The interesting program to execute. 
+- print_lib = [ +- "from __future__ import print_function", +- "import site", +- "import os", +- "python_paths = []", +- "if os.getenv('PYTHONPATH') is not None:", +- " python_paths = os.getenv('PYTHONPATH').split(':')", +- "try:", +- " library_paths = site.getsitepackages()", +- "except AttributeError:", +- " from distutils.sysconfig import get_python_lib", +- " library_paths = [get_python_lib()]", +- "all_paths = set(python_paths + library_paths)", +- "paths = []", +- "for path in all_paths:", +- " if os.path.isdir(path):", +- " paths.append(path)", +- "if len(paths) >=1:", +- " print(paths[0])", +- ] +- +- # The below script writes the above program to a file +- # and executes it. This is to work around the limitation +- # of not being able to upload files as part of execute. +- cmd = "from os import linesep;" +- cmd += "f = open('script.py', 'w');" +- for line in print_lib: +- cmd += "f.write(\"%s\" + linesep);" % line +- cmd += "f.close();" +- cmd += "from subprocess import call;" +- cmd += "call([\"%s\", \"script.py\"]);" % python_bin +- +- result = execute(repository_ctx, [python_bin, "-c", cmd]) +- return result.stdout.strip() +- +-def _check_python_lib(repository_ctx, python_lib): +- """Checks the python lib path.""" +- cmd = 'test -d "%s" -a -x "%s"' % (python_lib, python_lib) +- result = raw_exec(repository_ctx, [get_bash_bin(repository_ctx), "-c", cmd]) +- if result.return_code == 1: +- auto_config_fail("Invalid python library path: %s" % python_lib) +- +-def _check_python_bin(repository_ctx, python_bin): +- """Checks the python bin path.""" +- cmd = '[[ -x "%s" ]] && [[ ! -d "%s" ]]' % (python_bin, python_bin) +- result = raw_exec(repository_ctx, [get_bash_bin(repository_ctx), "-c", cmd]) +- if result.return_code == 1: +- auto_config_fail("--define %s='%s' is not executable. Is it the python binary?" % ( +- PYTHON_BIN_PATH, +- python_bin, +- )) +- +-def _get_python_include(repository_ctx, python_bin): +- """Gets the python include path.""" +- result = execute( +- repository_ctx, +- [ +- python_bin, +- "-Wignore", +- "-c", +- "import importlib; " + +- "import importlib.util; " + +- "print(importlib.import_module('distutils.sysconfig').get_python_inc() " + +- "if importlib.util.find_spec('distutils.sysconfig') " + +- "else importlib.import_module('sysconfig').get_path('include'))", +- ], +- error_msg = "Problem getting python include path.", +- error_details = ("Is the Python binary path set up right? " + +- "(See ./configure or " + PYTHON_BIN_PATH + ".) " + +- "Is distutils installed?"), +- ) +- return result.stdout.splitlines()[0] +- +-def _get_python_import_lib_name(repository_ctx, python_bin): +- """Get Python import library name (pythonXY.lib) on Windows.""" +- result = execute( +- repository_ctx, +- [ +- python_bin, +- "-c", +- "import sys;" + +- 'print("python" + str(sys.version_info[0]) + ' + +- ' str(sys.version_info[1]) + ".lib")', +- ], +- error_msg = "Problem getting python import library.", +- error_details = ("Is the Python binary path set up right? " + +- "(See ./configure or " + PYTHON_BIN_PATH + ".) 
"), +- ) +- return result.stdout.splitlines()[0] +- +-def _get_numpy_include(repository_ctx, python_bin): +- """Gets the numpy include path.""" +- return execute( +- repository_ctx, +- [ +- python_bin, +- "-c", +- "from __future__ import print_function;" + +- "import numpy;" + +- " print(numpy.get_include());", +- ], +- error_msg = "Problem getting numpy include path.", +- error_details = "Is numpy installed?", +- ).stdout.splitlines()[0] +- +-def _create_local_python_repository(repository_ctx): +- """Creates the repository containing files set up to build with Python.""" +- +- # Resolve all labels before doing any real work. Resolving causes the +- # function to be restarted with all previous state being lost. This +- # can easily lead to a O(n^2) runtime in the number of labels. +- build_tpl = repository_ctx.path(Label("//third_party/py:BUILD.tpl")) +- +- python_bin = get_python_bin(repository_ctx) +- _check_python_bin(repository_ctx, python_bin) +- python_lib = _get_python_lib(repository_ctx, python_bin) +- _check_python_lib(repository_ctx, python_lib) +- python_include = _get_python_include(repository_ctx, python_bin) +- numpy_include = _get_numpy_include(repository_ctx, python_bin) + "/numpy" +- python_include_rule = _symlink_genrule_for_dir( +- repository_ctx, +- python_include, +- "python_include", +- "python_include", +- ) +- python_import_lib_genrule = "" +- +- # To build Python C/C++ extension on Windows, we need to link to python import library pythonXY.lib +- # See https://docs.python.org/3/extending/windows.html +- if is_windows(repository_ctx): +- python_bin = python_bin.replace("\\", "/") +- python_include = _norm_path(python_include) +- python_import_lib_name = _get_python_import_lib_name(repository_ctx, python_bin) +- python_import_lib_src = python_include.rsplit("/", 1)[0] + "/libs/" + python_import_lib_name +- python_import_lib_genrule = _symlink_genrule_for_dir( +- repository_ctx, +- None, +- "", +- "python_import_lib", +- [python_import_lib_src], +- [python_import_lib_name], +- ) +- numpy_include_rule = _symlink_genrule_for_dir( +- repository_ctx, +- numpy_include, +- "numpy_include/numpy", +- "numpy_include", +- ) +- +- platform_constraint = "" +- if repository_ctx.attr.platform_constraint: +- platform_constraint = "\"%s\"" % repository_ctx.attr.platform_constraint +- repository_ctx.template("BUILD", build_tpl, { +- "%{PYTHON_BIN_PATH}": python_bin, +- "%{PYTHON_INCLUDE_GENRULE}": python_include_rule, +- "%{PYTHON_IMPORT_LIB_GENRULE}": python_import_lib_genrule, +- "%{NUMPY_INCLUDE_GENRULE}": numpy_include_rule, +- "%{PLATFORM_CONSTRAINT}": platform_constraint, +- }) +- +-def _create_remote_python_repository(repository_ctx, remote_config_repo): +- """Creates pointers to a remotely configured repo set up to build with Python. 
+- """ +- repository_ctx.template("BUILD", config_repo_label(remote_config_repo, ":BUILD"), {}) +- +-def _python_autoconf_impl(repository_ctx): +- """Implementation of the python_autoconf repository rule.""" +- if get_host_environ(repository_ctx, TF_PYTHON_CONFIG_REPO) != None: +- _create_remote_python_repository( +- repository_ctx, +- get_host_environ(repository_ctx, TF_PYTHON_CONFIG_REPO), +- ) +- else: +- _create_local_python_repository(repository_ctx) +- +-_ENVIRONS = [ +- BAZEL_SH, +- PYTHON_BIN_PATH, +- PYTHON_LIB_PATH, +-] +- +-local_python_configure = repository_rule( +- implementation = _create_local_python_repository, +- environ = _ENVIRONS, +- attrs = { +- "environ": attr.string_dict(), +- "platform_constraint": attr.string(), +- }, +-) +- +-remote_python_configure = repository_rule( +- implementation = _create_local_python_repository, +- environ = _ENVIRONS, +- remotable = True, +- attrs = { +- "environ": attr.string_dict(), +- "platform_constraint": attr.string(), +- }, +-) +- +-python_configure = repository_rule( +- implementation = _python_autoconf_impl, +- environ = _ENVIRONS + [TF_PYTHON_CONFIG_REPO], +- attrs = { +- "platform_constraint": attr.string(), +- }, +-) +-"""Detects and configures the local Python. +- +-Add the following to your WORKSPACE FILE: +- +-```python +-python_configure(name = "local_config_python") +-``` +- +-Args: +- name: A unique name for this workspace rule. +-""" +diff --git a/third_party/py/numpy/BUILD b/third_party/py/numpy/BUILD +index 97c7907fc38..c80cc5287bc 100644 +--- a/third_party/py/numpy/BUILD ++++ b/third_party/py/numpy/BUILD +@@ -2,14 +2,15 @@ licenses(["restricted"]) + + package(default_visibility = ["//visibility:public"]) + +-alias( ++py_library( + name = "numpy", +- actual = "@pypi_numpy//:pkg", ++ srcs = ["tf_numpy_dummy.py"], ++ srcs_version = "PY3", + ) + + alias( + name = "headers", +- actual = "@pypi_numpy//:numpy_headers", ++ actual = "@local_config_python//:numpy_headers", + ) + + genrule( +diff --git a/third_party/py/numpy/LICENSE b/third_party/py/numpy/LICENSE +deleted file mode 100644 +index b9731f734f5..00000000000 +--- a/third_party/py/numpy/LICENSE ++++ /dev/null +@@ -1,60 +0,0 @@ +-Copyright (c) 2005-2019, NumPy Developers. +-All rights reserved. +- +-Redistribution and use in source and binary forms, with or without +-modification, are permitted provided that the following conditions are +-met: +- +- * Redistributions of source code must retain the above copyright +- notice, this list of conditions and the following disclaimer. +- +- * Redistributions in binary form must reproduce the above +- copyright notice, this list of conditions and the following +- disclaimer in the documentation and/or other materials provided +- with the distribution. +- +- * Neither the name of the NumPy Developers nor the names of any +- contributors may be used to endorse or promote products derived +- from this software without specific prior written permission. +- +-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +-A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +- +- +- +-The NumPy repository and source distributions bundle several libraries that are +-compatibly licensed. We list these here. +- +-Name: Numpydoc +-Files: doc/sphinxext/numpydoc/* +-License: 2-clause BSD +- For details, see doc/sphinxext/LICENSE.txt +- +-Name: scipy-sphinx-theme +-Files: doc/scipy-sphinx-theme/* +-License: 3-clause BSD, PSF and Apache 2.0 +- For details, see doc/scipy-sphinx-theme/LICENSE.txt +- +-Name: lapack-lite +-Files: numpy/linalg/lapack_lite/* +-License: 3-clause BSD +- For details, see numpy/linalg/lapack_lite/LICENSE.txt +- +-Name: tempita +-Files: tools/npy_tempita/* +-License: BSD derived +- For details, see tools/npy_tempita/license.txt +- +-Name: dragon4 +-Files: numpy/core/src/multiarray/dragon4.c +-License: One of a kind +- For license text, see numpy/core/src/multiarray/dragon4.c +diff --git a/third_party/py/numpy/tf_numpy_api/tensorflow.experimental.numpy.pbtxt b/third_party/py/numpy/tf_numpy_api/tensorflow.experimental.numpy.pbtxt +index 05939b53b5f..2f5490ad0c9 100644 +--- a/third_party/py/numpy/tf_numpy_api/tensorflow.experimental.numpy.pbtxt ++++ b/third_party/py/numpy/tf_numpy_api/tensorflow.experimental.numpy.pbtxt +@@ -390,7 +390,7 @@ tf_module { + } + member_method { + name: "experimental_enable_numpy_behavior" +- argspec: "args=[\'prefer_float32\', \'dtype_conversion_mode\'], varargs=None, keywords=None, defaults=[\'False\', \'legacy\'], " ++ argspec: "args=[\'prefer_float32\'], varargs=None, keywords=None, defaults=[\'False\'], " + } + member_method { + name: "expm1" +diff --git a/third_party/py/python_configure.bzl b/third_party/py/python_configure.bzl +index 3728a91b931..300cbfb6c71 100644 +--- a/third_party/py/python_configure.bzl ++++ b/third_party/py/python_configure.bzl +@@ -1,4 +1,9 @@ + """Repository rule for Python autoconfiguration. ++ ++`python_configure` depends on the following environment variables: ++ ++ * `PYTHON_BIN_PATH`: location of python binary. ++ * `PYTHON_LIB_PATH`: Location of python libraries. + """ + + load( +@@ -6,8 +11,195 @@ load( + "BAZEL_SH", + "PYTHON_BIN_PATH", + "PYTHON_LIB_PATH", ++ "TF_PYTHON_CONFIG_REPO", ++ "auto_config_fail", ++ "config_repo_label", ++ "execute", ++ "get_bash_bin", ++ "get_host_environ", ++ "get_python_bin", ++ "is_windows", ++ "raw_exec", ++ "read_dir", + ) + ++def _genrule(src_dir, genrule_name, command, outs): ++ """Returns a string with a genrule. ++ ++ Genrule executes the given command and produces the given outputs. 
++ """ ++ return ( ++ "genrule(\n" + ++ ' name = "' + ++ genrule_name + '",\n' + ++ " outs = [\n" + ++ outs + ++ "\n ],\n" + ++ ' cmd = """\n' + ++ command + ++ '\n """,\n' + ++ ")\n" ++ ) ++ ++def _norm_path(path): ++ """Returns a path with '/' and remove the trailing slash.""" ++ path = path.replace("\\", "/") ++ if path[-1] == "/": ++ path = path[:-1] ++ return path ++ ++def _symlink_genrule_for_dir( ++ repository_ctx, ++ src_dir, ++ dest_dir, ++ genrule_name, ++ src_files = [], ++ dest_files = []): ++ """Returns a genrule to symlink(or copy if on Windows) a set of files. ++ ++ If src_dir is passed, files will be read from the given directory; otherwise ++ we assume files are in src_files and dest_files ++ """ ++ if src_dir != None: ++ src_dir = _norm_path(src_dir) ++ dest_dir = _norm_path(dest_dir) ++ files = "\n".join(read_dir(repository_ctx, src_dir)) ++ ++ # Create a list with the src_dir stripped to use for outputs. ++ dest_files = files.replace(src_dir, "").splitlines() ++ src_files = files.splitlines() ++ command = [] ++ outs = [] ++ for i in range(len(dest_files)): ++ if dest_files[i] != "": ++ # If we have only one file to link we do not want to use the dest_dir, as ++ # $(@D) will include the full path to the file. ++ dest = "$(@D)/" + dest_dir + dest_files[i] if len(dest_files) != 1 else "$(@D)/" + dest_files[i] ++ ++ # Copy the headers to create a sandboxable setup. ++ cmd = "cp -f" ++ command.append(cmd + ' "%s" "%s"' % (src_files[i], dest)) ++ outs.append(' "' + dest_dir + dest_files[i] + '",') ++ genrule = _genrule( ++ src_dir, ++ genrule_name, ++ " && ".join(command), ++ "\n".join(outs), ++ ) ++ return genrule ++ ++def _get_python_lib(repository_ctx, python_bin): ++ """Gets the python lib path.""" ++ python_lib = get_host_environ(repository_ctx, PYTHON_LIB_PATH) ++ if python_lib != None: ++ return python_lib ++ ++ # The interesting program to execute. ++ print_lib = [ ++ "from __future__ import print_function", ++ "import site", ++ "import os", ++ "python_paths = []", ++ "if os.getenv('PYTHONPATH') is not None:", ++ " python_paths = os.getenv('PYTHONPATH').split(':')", ++ "try:", ++ " library_paths = site.getsitepackages()", ++ "except AttributeError:", ++ " from distutils.sysconfig import get_python_lib", ++ " library_paths = [get_python_lib()]", ++ "all_paths = set(python_paths + library_paths)", ++ "paths = []", ++ "for path in all_paths:", ++ " if os.path.isdir(path):", ++ " paths.append(path)", ++ "if len(paths) >=1:", ++ " print(paths[0])", ++ ] ++ ++ # The below script writes the above program to a file ++ # and executes it. This is to work around the limitation ++ # of not being able to upload files as part of execute. ++ cmd = "from os import linesep;" ++ cmd += "f = open('script.py', 'w');" ++ for line in print_lib: ++ cmd += "f.write(\"%s\" + linesep);" % line ++ cmd += "f.close();" ++ cmd += "from subprocess import call;" ++ cmd += "call([\"%s\", \"script.py\"]);" % python_bin ++ ++ result = execute(repository_ctx, [python_bin, "-c", cmd]) ++ return result.stdout.strip() ++ ++def _check_python_lib(repository_ctx, python_lib): ++ """Checks the python lib path.""" ++ cmd = 'test -d "%s" -a -x "%s"' % (python_lib, python_lib) ++ result = raw_exec(repository_ctx, [get_bash_bin(repository_ctx), "-c", cmd]) ++ if result.return_code == 1: ++ auto_config_fail("Invalid python library path: %s" % python_lib) ++ ++def _check_python_bin(repository_ctx, python_bin): ++ """Checks the python bin path.""" ++ cmd = '[[ -x "%s" ]] && [[ ! 
-d "%s" ]]' % (python_bin, python_bin) ++ result = raw_exec(repository_ctx, [get_bash_bin(repository_ctx), "-c", cmd]) ++ if result.return_code == 1: ++ auto_config_fail("--define %s='%s' is not executable. Is it the python binary?" % ( ++ PYTHON_BIN_PATH, ++ python_bin, ++ )) ++ ++def _get_python_include(repository_ctx, python_bin): ++ """Gets the python include path.""" ++ result = execute( ++ repository_ctx, ++ [ ++ python_bin, ++ "-Wignore", ++ "-c", ++ "import importlib; " + ++ "import importlib.util; " + ++ "print(importlib.import_module('distutils.sysconfig').get_python_inc() " + ++ "if importlib.util.find_spec('distutils.sysconfig') " + ++ "else importlib.import_module('sysconfig').get_path('include'))", ++ ], ++ error_msg = "Problem getting python include path.", ++ error_details = ("Is the Python binary path set up right? " + ++ "(See ./configure or " + PYTHON_BIN_PATH + ".) " + ++ "Is distutils installed?"), ++ ) ++ return result.stdout.splitlines()[0] ++ ++def _get_python_import_lib_name(repository_ctx, python_bin): ++ """Get Python import library name (pythonXY.lib) on Windows.""" ++ result = execute( ++ repository_ctx, ++ [ ++ python_bin, ++ "-c", ++ "import sys;" + ++ 'print("python" + str(sys.version_info[0]) + ' + ++ ' str(sys.version_info[1]) + ".lib")', ++ ], ++ error_msg = "Problem getting python import library.", ++ error_details = ("Is the Python binary path set up right? " + ++ "(See ./configure or " + PYTHON_BIN_PATH + ".) "), ++ ) ++ return result.stdout.splitlines()[0] ++ ++def _get_numpy_include(repository_ctx, python_bin): ++ """Gets the numpy include path.""" ++ return execute( ++ repository_ctx, ++ [ ++ python_bin, ++ "-c", ++ "from __future__ import print_function;" + ++ "import numpy;" + ++ " print(numpy.get_include());", ++ ], ++ error_msg = "Problem getting numpy include path.", ++ error_details = "Is numpy installed?", ++ ).stdout.splitlines()[0] ++ + def _create_local_python_repository(repository_ctx): + """Creates the repository containing files set up to build with Python.""" + +@@ -15,14 +207,68 @@ def _create_local_python_repository(repository_ctx): + # function to be restarted with all previous state being lost. This + # can easily lead to a O(n^2) runtime in the number of labels. 
+ build_tpl = repository_ctx.path(Label("//third_party/py:BUILD.tpl")) ++ ++ python_bin = get_python_bin(repository_ctx) ++ _check_python_bin(repository_ctx, python_bin) ++ python_lib = _get_python_lib(repository_ctx, python_bin) ++ _check_python_lib(repository_ctx, python_lib) ++ python_include = _get_python_include(repository_ctx, python_bin) ++ numpy_include = _get_numpy_include(repository_ctx, python_bin) + "/numpy" ++ python_include_rule = _symlink_genrule_for_dir( ++ repository_ctx, ++ python_include, ++ "python_include", ++ "python_include", ++ ) ++ python_import_lib_genrule = "" ++ ++ # To build Python C/C++ extension on Windows, we need to link to python import library pythonXY.lib ++ # See https://docs.python.org/3/extending/windows.html ++ if is_windows(repository_ctx): ++ python_bin = python_bin.replace("\\", "/") ++ python_include = _norm_path(python_include) ++ python_import_lib_name = _get_python_import_lib_name(repository_ctx, python_bin) ++ python_import_lib_src = python_include.rsplit("/", 1)[0] + "/libs/" + python_import_lib_name ++ python_import_lib_genrule = _symlink_genrule_for_dir( ++ repository_ctx, ++ None, ++ "", ++ "python_import_lib", ++ [python_import_lib_src], ++ [python_import_lib_name], ++ ) ++ numpy_include_rule = _symlink_genrule_for_dir( ++ repository_ctx, ++ numpy_include, ++ "numpy_include/numpy", ++ "numpy_include", ++ ) ++ + platform_constraint = "" + if repository_ctx.attr.platform_constraint: + platform_constraint = "\"%s\"" % repository_ctx.attr.platform_constraint +- repository_ctx.template("BUILD", build_tpl, {"%{PLATFORM_CONSTRAINT}": platform_constraint}) ++ repository_ctx.template("BUILD", build_tpl, { ++ "%{PYTHON_BIN_PATH}": python_bin, ++ "%{PYTHON_INCLUDE_GENRULE}": python_include_rule, ++ "%{PYTHON_IMPORT_LIB_GENRULE}": python_import_lib_genrule, ++ "%{NUMPY_INCLUDE_GENRULE}": numpy_include_rule, ++ "%{PLATFORM_CONSTRAINT}": platform_constraint, ++ }) ++ ++def _create_remote_python_repository(repository_ctx, remote_config_repo): ++ """Creates pointers to a remotely configured repo set up to build with Python. 
++ """ ++ repository_ctx.template("BUILD", config_repo_label(remote_config_repo, ":BUILD"), {}) + + def _python_autoconf_impl(repository_ctx): + """Implementation of the python_autoconf repository rule.""" +- _create_local_python_repository(repository_ctx) ++ if get_host_environ(repository_ctx, TF_PYTHON_CONFIG_REPO) != None: ++ _create_remote_python_repository( ++ repository_ctx, ++ get_host_environ(repository_ctx, TF_PYTHON_CONFIG_REPO), ++ ) ++ else: ++ _create_local_python_repository(repository_ctx) + + _ENVIRONS = [ + BAZEL_SH, +@@ -32,6 +278,7 @@ _ENVIRONS = [ + + local_python_configure = repository_rule( + implementation = _create_local_python_repository, ++ environ = _ENVIRONS, + attrs = { + "environ": attr.string_dict(), + "platform_constraint": attr.string(), +@@ -50,6 +297,7 @@ remote_python_configure = repository_rule( + + python_configure = repository_rule( + implementation = _python_autoconf_impl, ++ environ = _ENVIRONS + [TF_PYTHON_CONFIG_REPO], + attrs = { + "platform_constraint": attr.string(), + }, +-- +2.41.0 + diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0013-installation-remove-cp_local_config_python.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0013-installation-remove-cp_local_config_python.patch new file mode 100644 index 000000000000..d6c502878849 --- /dev/null +++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0013-installation-remove-cp_local_config_python.patch @@ -0,0 +1,68 @@ +From 9a0eb9b34277229370d8df8407e4b99c13a6da0f Mon Sep 17 00:00:00 2001 +From: wangjiezhe <wangjiezhe@gmail.com> +Date: Fri, 22 Dec 2023 20:25:52 +0800 +Subject: [PATCH 13/13] installation: remove `cp_local_config_python` + +Revert https://github.com/tensorflow/tensorflow/commit/a034b3d48a9d3dbccff22800ab4b435a89f45103 +--- + .../tools/pip_package/build_pip_package.sh | 25 ------------------- + 1 file changed, 25 deletions(-) + +diff --git a/tensorflow/tools/pip_package/build_pip_package.sh b/tensorflow/tools/pip_package/build_pip_package.sh +index 4a2d42bba58..af76ca4d978 100755 +--- a/tensorflow/tools/pip_package/build_pip_package.sh ++++ b/tensorflow/tools/pip_package/build_pip_package.sh +@@ -47,22 +47,6 @@ function cp_external() { + cp "${src_dir}/local_config_cuda/cuda/cuda/cuda_config.h" "${dest_dir}/local_config_cuda/cuda/cuda/" + } + +-function cp_local_config_python() { +- local src_dir=$1 +- local dest_dir=$2 +- pushd . 
+- cd "$src_dir" +- mkdir -p "${dest_dir}/local_config_python/numpy_include/" +- cp -r "pypi_numpy/site-packages/numpy/core/include/numpy" "${dest_dir}/local_config_python/numpy_include/" +- mkdir -p "${dest_dir}/local_config_python/python_include/" +- if is_windows; then +- cp -r python_*/include/* "${dest_dir}/local_config_python/python_include/" +- else +- cp -r python_*/include/python*/* "${dest_dir}/local_config_python/python_include/" +- fi +- popd +-} +- + function copy_xla_aot_runtime_sources() { + local src_dir=$1 + local dst_dir=$2 +@@ -174,9 +158,6 @@ function prepare_src() { + cp_external \ + bazel-bin/tensorflow/tools/pip_package/build_pip_package.exe.runfiles \ + "${EXTERNAL_INCLUDES}/" +- cp_local_config_python \ +- bazel-bin/tensorflow/tools/pip_package/build_pip_package.exe.runfiles \ +- "${EXTERNAL_INCLUDES}/" + copy_xla_aot_runtime_sources \ + bazel-bin/tensorflow/tools/pip_package/build_pip_package.exe.runfiles/org_tensorflow \ + "${XLA_AOT_RUNTIME_SOURCES}/" +@@ -220,17 +201,11 @@ function prepare_src() { + cp_external \ + bazel-bin/tensorflow/tools/pip_package/build_pip_package.runfiles/org_tensorflow/external \ + "${EXTERNAL_INCLUDES}" +- cp_local_config_python \ +- bazel-bin/tensorflow/tools/pip_package/build_pip_package.runfiles/org_tensorflow/external \ +- "${EXTERNAL_INCLUDES}" + else + # New-style runfiles structure (--nolegacy_external_runfiles). + cp_external \ + bazel-bin/tensorflow/tools/pip_package/build_pip_package.runfiles \ + "${EXTERNAL_INCLUDES}" +- cp_local_config_python \ +- bazel-bin/tensorflow/tools/pip_package/build_pip_package.runfiles \ +- "${EXTERNAL_INCLUDES}" + fi + copy_xla_aot_runtime_sources \ + bazel-bin/tensorflow/tools/pip_package/build_pip_package.runfiles/org_tensorflow \ +-- +2.41.0 + diff --git a/sci-libs/tensorflow/tensorflow-2.14.1.ebuild b/sci-libs/tensorflow/tensorflow-2.14.1.ebuild new file mode 100644 index 000000000000..5e4117a37653 --- /dev/null +++ b/sci-libs/tensorflow/tensorflow-2.14.1.ebuild @@ -0,0 +1,446 @@ +# Copyright 1999-2024 Gentoo Authors +# Distributed under the terms of the GNU General Public License v2 + +EAPI=8 + +DISTUTILS_OPTIONAL=1 +PYTHON_COMPAT=( python3_{10..11} ) +MY_PV=${PV/_rc/-rc} +MY_P=${PN}-${MY_PV} +DEP_VER="$(ver_cut 1-2)" + +inherit bazel check-reqs cuda distutils-r1 flag-o-matic prefix toolchain-funcs + +DESCRIPTION="Computation framework using data flow graphs for scalable machine learning" +HOMEPAGE="https://www.tensorflow.org/" + +RESTRICT="test" # Tests need GPU access +LICENSE="Apache-2.0" +SLOT="0" +KEYWORDS="~amd64" +IUSE="cuda mpi +python xla" +CPU_USE_FLAGS_X86="sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma3 fma4" +for i in $CPU_USE_FLAGS_X86; do + IUSE+=" cpu_flags_x86_${i}" +done + +# distfiles that bazel uses for the workspace, will be copied to basel-distdir +# pkgcheck complains but do NOT change the .zip to .tar.gz, bazel requires the exact tarball (basename and sha256). +# the build will fail if different archives are used. 
+bazel_external_uris=" + https://github.com/Maratyszcza/FP16/archive/4dfe081cf6bcd15db339cf2680b9281b8451eeb3.zip -> FP16-4dfe081cf6bcd15db339cf2680b9281b8451eeb3.zip + https://github.com/Maratyszcza/FXdiv/archive/63058eff77e11aa15bf531df5dd34395ec3017c8.zip -> FXdiv-63058eff77e11aa15bf531df5dd34395ec3017c8.zip + https://github.com/Maratyszcza/pthreadpool/archive/b8374f80e42010941bda6c85b0e3f1a1bd77a1e0.zip -> pthreadpool-b8374f80e42010941bda6c85b0e3f1a1bd77a1e0.zip + https://github.com/bazelbuild/apple_support/releases/download/1.6.0/apple_support.1.6.0.tar.gz + https://github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz + https://github.com/bazelbuild/bazel-toolchains/archive/8c717f8258cd5f6c7a45b97d974292755852b658.tar.gz -> bazel-toolchains-8c717f8258cd5f6c7a45b97d974292755852b658.tar.gz + https://github.com/bazelbuild/platforms/releases/download/0.0.6/platforms-0.0.6.tar.gz -> bazelbuild-platforms-0.0.6.tar.gz + https://github.com/bazelbuild/rules_android/archive/v0.1.1.zip -> bazelbuild-rules_android-v0.1.1.zip + https://github.com/bazelbuild/rules_apple/releases/download/2.3.0/rules_apple.2.3.0.tar.gz + https://github.com/bazelbuild/rules_cc/archive/081771d4a0e9d7d3aa0eed2ef389fa4700dfb23e.tar.gz -> bazelbuild-rules_cc-081771d4a0e9d7d3aa0eed2ef389fa4700dfb23e.tar.gz + https://github.com/bazelbuild/rules_closure/archive/308b05b2419edb5c8ee0471b67a40403df940149.tar.gz -> bazelbuild-rules_closure-308b05b2419edb5c8ee0471b67a40403df940149.tar.gz + https://github.com/bazelbuild/rules_docker/releases/download/v0.10.0/rules_docker-v0.10.0.tar.gz -> bazelbuild-rules_docker-v0.10.0.tar.gz + https://github.com/bazelbuild/rules_foreign_cc/archive/0.7.1.tar.gz -> bazelbuild-rules_foreign_cc-0.7.1.tar.gz + https://github.com/bazelbuild/rules_java/archive/7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip -> bazelbuild-rules_java-7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip + https://github.com/bazelbuild/rules_java/releases/download/5.5.1/rules_java-5.5.1.tar.gz -> bazelbuild-rules_java-5.5.1.tar.gz + https://github.com/bazelbuild/rules_jvm_external/archive/4.3.zip -> bazelbuild-rules_jvm_external-4.3.zip + https://github.com/bazelbuild/rules_pkg/releases/download/0.7.1/rules_pkg-0.7.1.tar.gz -> bazelbuild-rules_pkg-0.7.1.tar.gz + https://github.com/bazelbuild/rules_proto/archive/11bf7c25e666dd7ddacbcd4d4c4a9de7a25175f8.tar.gz -> bazelbuild-rules_proto-11bf7c25e666dd7ddacbcd4d4c4a9de7a25175f8.tar.gz + https://github.com/bazelbuild/rules_python/releases/download/0.1.0/rules_python-0.1.0.tar.gz -> bazelbuild-rules_python-0.1.0.tar.gz + https://github.com/bazelbuild/rules_swift/releases/download/1.0.0/rules_swift.1.0.0.tar.gz -> bazelbuild-rules_swift.1.0.0.tar.gz + https://github.com/dmlc/dlpack/archive/9351cf542ab478499294864ff3acfdab5c8c5f3d.tar.gz -> dlpack-9351cf542ab478499294864ff3acfdab5c8c5f3d.tar.gz + https://github.com/google/XNNPACK/archive/b9d4073a6913891ce9cbd8965c8d506075d2a45a.zip -> XNNPACK-b9d4073a6913891ce9cbd8965c8d506075d2a45a.zip + https://github.com/google/benchmark/archive/f7547e29ccaed7b64ef4f7495ecfff1c9f6f3d03.tar.gz -> benchmark-f7547e29ccaed7b64ef4f7495ecfff1c9f6f3d03.tar.gz + https://github.com/google/farmhash/archive/0d859a811870d10f53a594927d0d0b97573ad06d.tar.gz -> farmhash-0d859a811870d10f53a594927d0d0b97573ad06d.tar.gz + https://github.com/google/gemmlowp/archive/e844ffd17118c1e17d94e1ba4354c075a4577b88.zip -> gemmlowp-e844ffd17118c1e17d94e1ba4354c075a4577b88.zip + 
https://github.com/google/highwayhash/archive/c13d28517a4db259d738ea4886b1f00352a3cc33.tar.gz -> highwayhash-c13d28517a4db259d738ea4886b1f00352a3cc33.tar.gz + https://github.com/google/re2/archive/03da4fc0857c285e3a26782f6bc8931c4c950df4.tar.gz -> re2-03da4fc0857c285e3a26782f6bc8931c4c950df4.tar.gz + https://github.com/google/ruy/archive/3286a34cc8de6149ac6844107dfdffac91531e72.zip -> ruy-3286a34cc8de6149ac6844107dfdffac91531e72.zip + https://github.com/googleapis/googleapis/archive/6b3fdcea8bc5398be4e7e9930c693f0ea09316a0.tar.gz -> googleapis-6b3fdcea8bc5398be4e7e9930c693f0ea09316a0.tar.gz + https://github.com/jax-ml/ml_dtypes/archive/5b9fc9ad978757654843f4a8d899715dbea30e88/ml_dtypes-5b9fc9ad978757654843f4a8d899715dbea30e88.tar.gz + https://github.com/joe-kuo/sobol_data/archive/835a7d7b1ee3bc83e575e302a985c66ec4b65249.tar.gz -> sobol_data-835a7d7b1ee3bc83e575e302a985c66ec4b65249.tar.gz + https://github.com/llvm/llvm-project/archive/668e33c6401abe7844691fb7d47a3cf2d2012dbc.tar.gz -> llvm-project-668e33c6401abe7844691fb7d47a3cf2d2012dbc.tar.gz + https://github.com/llvm/llvm-project/releases/download/llvmorg-10.0.1/openmp-10.0.1.src.tar.xz -> llvmorg-10.0.1-openmp-10.0.1.src.tar.xz + https://github.com/mborgerding/kissfft/archive/131.1.0.tar.gz -> kissfft-131.1.0.tar.gz + https://github.com/oneapi-src/oneDNN/archive/refs/tags/v3.2.1.tar.gz -> oneDNN-v3.2.1.tar.gz + https://github.com/openxla/stablehlo/archive/9ae6c373a6e2941ff84a8831bb3724728cb2b49a.zip -> openxla-stablehlo-9ae6c373a6e2941ff84a8831bb3724728cb2b49a.zip + https://github.com/openxla/triton/archive/cl546794996.tar.gz -> openxla-triton-cl546794996.tar.gz + https://github.com/petewarden/OouraFFT/archive/v1.0.tar.gz -> OouraFFT-v1.0.tar.gz + https://github.com/protocolbuffers/protobuf/archive/v3.21.9.zip -> protobuf-3.21.9.zip + https://github.com/pybind/pybind11_abseil/archive/2c4932ed6f6204f1656e245838f4f5eae69d2e29.tar.gz -> pybind11_abseil-2c4932ed6f6204f1656e245838f4f5eae69d2e29.tar.gz + https://github.com/pybind/pybind11_bazel/archive/72cbbf1fbc830e487e3012862b7b720001b70672.tar.gz -> pybind11_bazel-72cbbf1fbc830e487e3012862b7b720001b70672.tar.gz + https://github.com/pybind/pybind11_protobuf/archive/80f3440cd8fee124e077e2e47a8a17b78b451363.zip -> pybind11_protobuf-80f3440cd8fee124e077e2e47a8a17b78b451363.zip + https://github.com/pytorch/cpuinfo/archive/87d8234510367db49a65535021af5e1838a65ac2.tar.gz -> pytorch-cpuinfo-87d8234510367db49a65535021af5e1838a65ac2.tar.gz + https://github.com/pytorch/cpuinfo/archive/87d8234510367db49a65535021af5e1838a65ac2.zip -> pytorch-cpuinfo-87d8234510367db49a65535021af5e1838a65ac2.zip + https://github.com/tensorflow/runtime/archive/769f5cc9b8732933140b09e8808d13614182b496.tar.gz -> tensorflow-runtime-769f5cc9b8732933140b09e8808d13614182b496.tar.gz + https://gitlab.com/libeigen/eigen/-/archive/0b51f763cbbd0ed08168f88972724329f0375498/eigen-0b51f763cbbd0ed08168f88972724329f0375498.tar.gz + cuda? ( + https://github.com/NVIDIA/cudnn-frontend/archive/refs/tags/v0.9.zip -> cudnn-frontend-v0.9.zip + https://github.com/NVlabs/cub/archive/1.9.9.zip -> cub-1.9.9.zip + https://github.com/nvidia/nccl/archive/v2.16.5-1.tar.gz -> nvidia-nccl-v2.16.5-1.tar.gz + ) + python? 
( + https://github.com/intel/ARM_NEON_2_x86_SSE/archive/a15b489e1222b2087007546b4912e21293ea86ff.tar.gz -> ARM_NEON_2_x86_SSE-a15b489e1222b2087007546b4912e21293ea86ff.tar.gz + https://storage.googleapis.com/mirror.tensorflow.org/docs.python.org/2.7/_sources/license.rst.txt -> tensorflow-1.15.0-python-license.rst.txt + )" + +SRC_URI="https://github.com/${PN}/${PN}/archive/v${MY_PV}.tar.gz -> ${P}.tar.gz + ${bazel_external_uris}" + +# abseil-cpp-20211102.0-r0 does not work with NVCC +# check flatbuffers version in tensorflow/lite/schema/schema_generated.h +RDEPEND=" + app-arch/snappy + =dev-cpp/abseil-cpp-20230125.2*:= + dev-db/sqlite + dev-libs/double-conversion + dev-libs/icu:= + >=dev-libs/jsoncpp-1.9.2:= + >=dev-libs/nsync-1.25.0 + dev-libs/openssl:0= + >=dev-libs/protobuf-3.13.0:= + >=dev-libs/re2-0.2019.06.01:= + media-libs/giflib + media-libs/libjpeg-turbo + media-libs/libpng:0 + >=net-libs/grpc-1.28:= + net-misc/curl + sys-libs/zlib + >=sys-apps/hwloc-2:= + cuda? ( + dev-util/nvidia-cuda-toolkit:=[profiler] + =dev-libs/cudnn-8* + ) + mpi? ( virtual/mpi ) + python? ( + ${PYTHON_DEPS} + ~dev-libs/flatbuffers-23.5.26:= + dev-python/absl-py[${PYTHON_USEDEP}] + >=dev-python/astor-0.7.1[${PYTHON_USEDEP}] + dev-python/astunparse[${PYTHON_USEDEP}] + dev-python/clang-python[${PYTHON_USEDEP}] + dev-python/dill[${PYTHON_USEDEP}] + ~dev-python/flatbuffers-23.5.26[${PYTHON_USEDEP}] + >=dev-python/gast-0.3.3[${PYTHON_USEDEP}] + dev-python/h5py[${PYTHON_USEDEP}] + <dev-python/ml_dtypes-0.3.0[${PYTHON_USEDEP}] + >=dev-python/numpy-1.19[${PYTHON_USEDEP}] + >=dev-python/google-pasta-0.1.8[${PYTHON_USEDEP}] + >=dev-python/opt-einsum-3.3.0[${PYTHON_USEDEP}] + >=dev-python/protobuf-python-3.13.0[${PYTHON_USEDEP}] + dev-python/pybind11[${PYTHON_USEDEP}] + dev-python/six[${PYTHON_USEDEP}] + dev-python/tblib[${PYTHON_USEDEP}] + dev-python/termcolor[${PYTHON_USEDEP}] + dev-python/typing-extensions[${PYTHON_USEDEP}] + >=dev-python/grpcio-1.28[${PYTHON_USEDEP}] + >=dev-python/wrapt-1.11.1[${PYTHON_USEDEP}] + >=net-libs/google-cloud-cpp-0.10.0 + =sci-visualization/tensorboard-${DEP_VER}*[${PYTHON_USEDEP}] + )" +DEPEND="${RDEPEND} + python? ( + dev-python/mock + dev-python/setuptools + )" +PDEPEND="python? ( + =sci-libs/keras-${DEP_VER}*[${PYTHON_USEDEP}] + =sci-libs/tensorflow-estimator-${DEP_VER}*[${PYTHON_USEDEP}] + )" +# >=dev-libs/protobuf-3.8.0 +BDEPEND=" + app-arch/unzip + =dev-build/bazel-6* + <dev-build/bazel-6.3 + dev-java/java-config + cuda? ( + >=dev-util/nvidia-cuda-toolkit-9.1[profiler] + ) + !python? ( dev-lang/python ) + python? ( + dev-python/cython + dev-python/mock + >=dev-python/grpcio-tools-1.28 + ) + dev-util/patchelf" +REQUIRED_USE="python? 
( ${PYTHON_REQUIRED_USE} )" + +S="${WORKDIR}/${MY_P}" + +DOCS=( AUTHORS CONTRIBUTING.md ISSUE_TEMPLATE.md README.md RELEASE.md ) +CHECKREQS_MEMORY="5G" +CHECKREQS_DISK_BUILD="10G" + +PATCHES=( + "${FILESDIR}/${P}-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch" + "${FILESDIR}/${P}-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch" + "${FILESDIR}/${P}-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch" + "${FILESDIR}/${P}-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch" + "${FILESDIR}/${P}-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch" + "${FILESDIR}/${P}-0006-systemlib-Update-targets-for-absl_py.patch" + "${FILESDIR}/${P}-0007-systemlib-Add-well_known_types_py_pb2-target.patch" + "${FILESDIR}/${P}-0008-Relax-setup.py-version-requirements.patch" + "${FILESDIR}/${P}-0009-systemlib-update-targets-for-absl.patch" + "${FILESDIR}/${P}-0010-systemlib-fix-missing-osx-in-pybind11.patch" + "${FILESDIR}/${P}-0011-systemlib-fix-missing-LICENSE-in-flatbuffers.patch" + "${FILESDIR}/${P}-0012-build-use-non-hermetic-python.patch" + "${FILESDIR}/${P}-0013-installation-remove-cp_local_config_python.patch" +) + +get-cpu-flags() { + local i f=() + # Keep this list in sync with tensorflow/core/platform/cpu_feature_guard.cc. + for i in sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma4; do + use cpu_flags_x86_${i} && f+=( -m${i/_/.} ) + done + use cpu_flags_x86_fma3 && f+=( -mfma ) + echo "${f[*]}" +} + +pkg_setup() { + local num_pythons_enabled + num_pythons_enabled=0 + count_impls() { + num_pythons_enabled=$((${num_pythons_enabled} + 1)) + } + use python && python_foreach_impl count_impls + + # 10G to build C/C++ libs, 6G per python impl + CHECKREQS_DISK_BUILD="$((10 + 6 * ${num_pythons_enabled}))G" + check-reqs_pkg_setup +} + +src_unpack() { + # Only unpack the main distfile + unpack "${P}.tar.gz" + bazel_load_distfiles "${bazel_external_uris}" +} + +src_prepare() { + export JAVA_HOME=$(java-config --jre-home) # so keepwork works + export TF_PYTHON_VERSION="${EPYTHON/python/}" + + append-flags $(get-cpu-flags) + append-cxxflags -std=c++17 + export BUILD_CXXFLAGS+=" -std=c++17" + filter-flags '-fvtable-verify=@(std|preinit)' + bazel_setup_bazelrc + + # Relax version checks in setup.py + sed -i "/^ '/s/==/>=/g" tensorflow/tools/pip_package/setup.py || die + + # Prefixify hard-coded command locations + hprefixify -w /host_compiler_prefix/ third_party/gpus/cuda_configure.bzl + + default + use python && python_copy_sources + + use cuda && cuda_add_sandbox +} + +src_configure() { + export JAVA_HOME=$(java-config --jre-home) # so keepwork works + export KERAS_HOME="${T}/.keras" # otherwise sandbox violation writing ~/.keras + + do_configure() { + export CC_OPT_FLAGS=" " + export TF_ENABLE_XLA=$(usex xla 1 0) + export TF_NEED_OPENCL_SYCL=0 + export TF_NEED_OPENCL=0 + export TF_NEED_COMPUTECPP=0 + export TF_NEED_ROCM=0 + export TF_NEED_MPI=$(usex mpi 1 0) + export TF_SET_ANDROID_WORKSPACE=0 + + if use python; then + export PYTHON_BIN_PATH="${PYTHON}" + export PYTHON_LIB_PATH="$(python_get_sitedir)" + else + export PYTHON_BIN_PATH="$(which python)" + export PYTHON_LIB_PATH="$(python -c 'from distutils.sysconfig import *; print(get_python_lib())')" + fi + + export TF_NEED_CUDA=$(usex cuda 1 0) + export TF_DOWNLOAD_CLANG=0 + export TF_CUDA_CLANG=0 + export TF_NEED_TENSORRT=0 # $(usex cuda 1 0) + if use cuda; then + export TF_CUDA_PATHS="${EPREFIX}/opt/cuda" + export GCC_HOST_COMPILER_PATH="$(cuda_gccdir)/$(tc-getCC)" + export TF_CUDA_VERSION="$(cuda_toolkit_version)" + export 
TF_CUDNN_VERSION="$(cuda_cudnn_version)" + einfo "Setting CUDA version: $TF_CUDA_VERSION" + einfo "Setting CUDNN version: $TF_CUDNN_VERSION" + + if [[ $(cuda-config -s) != *$(gcc-version)* ]]; then + ewarn "TensorFlow is being built with Nvidia CUDA support. Your default compiler" + ewarn "version is not supported by the currently installed CUDA. TensorFlow will" + ewarn "instead be compiled using: ${GCC_HOST_COMPILER_PATH}." + ewarn "If the build fails with linker errors try rebuilding the relevant" + ewarn "dependencies using the same compiler version." + fi + + if [[ -z "$TF_CUDA_COMPUTE_CAPABILITIES" ]]; then + ewarn "WARNING: TensorFlow is being built with its default CUDA compute capabilities: 3.5 and 7.0." + ewarn "These may not be optimal for your GPU." + ewarn "" + ewarn "To configure TensorFlow with the CUDA compute capability that is optimal for your GPU," + ewarn "set TF_CUDA_COMPUTE_CAPABILITIES in your make.conf, and re-emerge tensorflow." + ewarn "For example, to use CUDA capability 7.5 & 3.5, add: TF_CUDA_COMPUTE_CAPABILITIES=7.5,3.5" + ewarn "" + ewarn "You can look up your GPU's CUDA compute capability at https://developer.nvidia.com/cuda-gpus" + ewarn "or by running /opt/cuda/extras/demo_suite/deviceQuery | grep 'CUDA Capability'" + fi + fi + + # com_googlesource_code_re2 weird branch using absl, doesnt work with released re2 + # com_google_protobuf is disabled due to https://github.com/tensorflow/tensorflow/issues/61593 + local SYSLIBS=( + absl_py + astor_archive + astunparse_archive + boringssl + com_github_googlecloudplatform_google_cloud_cpp + com_github_grpc_grpc + com_google_absl + # com_google_protobuf + curl + cython + dill_archive + double_conversion + flatbuffers + functools32_archive + gast_archive + gif + hwloc + icu + jsoncpp_git + libjpeg_turbo + nasm + nsync + opt_einsum_archive + org_sqlite + pasta + png + pybind11 + six_archive + snappy + tblib_archive + termcolor_archive + typing_extensions_archive + wrapt + zlib + ) + + export TF_SYSTEM_LIBS="${SYSLIBS[@]}" + export TF_IGNORE_MAX_BAZEL_VERSION=1 + + # This is not autoconf + ./configure || die + + echo 'build --config=noaws --config=nohdfs --config=nonccl' >> .bazelrc || die + echo 'build --define tensorflow_mkldnn_contraction_kernel=0' >> .bazelrc || die + echo "build --action_env=KERAS_HOME=\"${T}/.keras\"" >> .bazelrc || die + echo "build --host_action_env=KERAS_HOME=\"${T}/.keras\"" >> .bazelrc || die + + for cflag in $($(tc-getPKG_CONFIG) jsoncpp --cflags) + do + echo "build --copt=\"${cflag}\"" >> .bazelrc || die + echo "build --host_copt=\"${cflag}\"" >> .bazelrc || die + done + } + if use python; then + python_foreach_impl run_in_build_dir do_configure + else + do_configure + fi +} + +src_compile() { + export JAVA_HOME=$(java-config --jre-home) # so keepwork works + export KERAS_HOME="${T}/.keras" # otherwise sandbox violation writing ~/.keras + + if use python; then + python_setup + BUILD_DIR="${S}-${EPYTHON/./_}" + cd "${BUILD_DIR}" || die + fi + + # fail early if any deps are missing + ebazel build -k --nobuild \ + //tensorflow:libtensorflow_framework.so \ + //tensorflow:libtensorflow.so \ + //tensorflow:libtensorflow_cc.so \ + $(usex python '//tensorflow/tools/pip_package:build_pip_package' '') + + ebazel build \ + //tensorflow:libtensorflow_framework.so \ + //tensorflow:libtensorflow.so + ebazel build //tensorflow:libtensorflow_cc.so + ebazel build //tensorflow:install_headers + ebazel shutdown + + do_compile() { + ebazel build //tensorflow/tools/pip_package:build_pip_package + ebazel 
shutdown + } + BUILD_DIR="${S}" + cd "${BUILD_DIR}" || die + use python && python_foreach_impl run_in_build_dir do_compile +} + +src_install() { + local i l + export JAVA_HOME=$(java-config --jre-home) # so keepwork works + export KERAS_HOME="${T}/.keras" # otherwise sandbox violation writing ~/.keras + + do_install() { + einfo "Installing ${EPYTHON} files" + local srcdir="${T}/src-${MULTIBUILD_VARIANT}" + mkdir -p "${srcdir}" || die + bazel-bin/tensorflow/tools/pip_package/build_pip_package --src "${srcdir}" || die + cd "${srcdir}" || die + esetup.py install + + # libtensorflow_framework.so and libtensorflow_cc.so are in /usr/lib already + rm -f "${D}/$(python_get_sitedir)"/${PN}/lib${PN}_framework.so* || die + rm -f "${D}/$(python_get_sitedir)"/${PN}/lib${PN}_cc.so* || die + python_optimize + } + + if use python; then + python_foreach_impl run_in_build_dir do_install + + # Symlink to python-exec scripts + for i in "${ED}"/usr/lib/python-exec/*/*; do + n="${i##*/}" + [[ -e "${ED}/usr/bin/${n}" ]] || dosym ../lib/python-exec/python-exec2 "/usr/bin/${n}" + done + + python_setup + local BUILD_DIR="${S}-${EPYTHON/./_}" + cd "${BUILD_DIR}" || die + fi + + einfo "Installing headers" + insinto /usr/include/${PN}/ + doins -r bazel-bin/tensorflow/include/* + + einfo "Installing libs" + # Generate pkg-config file + ${PN}/c/generate-pc.sh --prefix="${EPREFIX}"/usr --libdir=$(get_libdir) --version=${MY_PV} || die + insinto /usr/$(get_libdir)/pkgconfig + doins ${PN}.pc ${PN}_cc.pc + + for l in libtensorflow{,_framework,_cc}.so; do + patchelf --add-rpath '/opt/cuda/lib64' bazel-bin/tensorflow/${l} + dolib.so bazel-bin/tensorflow/${l} + dolib.so bazel-bin/tensorflow/${l}.$(ver_cut 1) + dolib.so bazel-bin/tensorflow/${l}.$(ver_cut 1-3) + done + + einstalldocs + + # Workaround for https://bugs.gentoo.org/831927 + export MAKEOPTS="-j1" +}
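The ewarn messages in src_configure above direct users to set TF_CUDA_COMPUTE_CAPABILITIES in make.conf before re-emerging the package. A minimal make.conf sketch follows; it is not part of the commit, the capability pair 7.5,3.5 is simply the ewarn's own example, and the USE and CPU_FLAGS_X86 names are taken from the ebuild's IUSE and CPU_USE_FLAGS_X86 lists, so substitute values that match your GPU and CPU:

# /etc/portage/make.conf (illustrative sketch only, not from this commit)
TF_CUDA_COMPUTE_CAPABILITIES=7.5,3.5
USE="${USE} cuda python xla"
CPU_FLAGS_X86="sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma3"

The ebuild itself only warns when TF_CUDA_COMPUTE_CAPABILITIES is empty; the value is consumed by TensorFlow's own ./configure script, which src_configure invokes after exporting the TF_* environment variables.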