Compare commits

...

9 Commits

Author SHA1 Message Date
ed048616fe Set version to 0.10.4.dev0 (#169) 2025-10-16 20:21:35 +02:00
b182cd3458 feat: allow get_kernel to log telemetry. (#167)
* feat: allow get_kernel to log telemetry.

* Apply suggestions from code review

Co-authored-by: Daniël de Kok <me@danieldk.eu>

* doc

---------

Co-authored-by: Daniël de Kok <me@danieldk.eu>
2025-10-16 20:16:41 +02:00
ce77658efc fix: kernels upload to a repo branch (#168)
* fix: kernels upload to a repo branch

* up
2025-10-16 16:01:00 +02:00
b96b154e7f Avoid exception when detecting XPU on Torch <= 2.6 (#165)
torch.version has no xpu field in torch<=2.6

Signed-off-by: Wang, Yi A <yi.a.wang@intel.com>
2025-10-14 09:01:53 +02:00
b24ef9fa6b Set version to 0.10.3.dev0 (#164) 2025-10-13 17:23:39 +02:00
a7101b2cfd feat: allow kernels to be uploaded to a revision (#161)
* feat: allow kernels to be uploaded to a revision

* revision -> branch
2025-10-13 10:31:11 +02:00
6241afa06e Bump torch version in runner (#162)
* bump torch version

* run kernels lock tests/kernel_locking
2025-10-09 11:04:52 +02:00
34a1932751 Link local kernel and local/locked kernel API docs (#160) 2025-10-02 14:38:47 +02:00
e39eac09c1 up (#159) 2025-09-30 17:42:09 +02:00
12 changed files with 156 additions and 83 deletions

View File

@@ -24,7 +24,7 @@ jobs:
max-parallel: 4
matrix:
python-version: ["3.10", "3.12"]
torch-version: ["2.6.0", "2.7.0"]
torch-version: ["2.7.0", "2.8.0"]
env:
UV_PYTHON_PREFERENCE: only-managed

View File

@@ -6,6 +6,10 @@
[[autodoc]] kernels.get_kernel
### get_local_kernel
[[autodoc]] kernels.get_local_kernel
### has_kernel
[[autodoc]] kernels.has_kernel

View File

@@ -39,3 +39,11 @@
### LayerRepository
[[autodoc]] kernels.LayerRepository
### LocalLayerRepository
[[autodoc]] kernels.LocalLayerRepository
### LockedLayerRepository
[[autodoc]] kernels.LockedLayerRepository

View File

@@ -48,7 +48,7 @@ $ kernels to-wheel drbh/img2grey 1.1.2
### kernels upload
Use `kernels upload <dir_containing_build> --repo_id="hub-username/kernel"` to upload
your kernel builds to the Hub.
your kernel builds to the Hub. To see the supported arguments, run `kernels upload -h`.
**Notes**:

View File

@@ -39,3 +39,13 @@ The approach of `forward`-replacement is the least invasive, because
it preserves the original model graph. It is also reversible, since
even though the `forward` of a layer _instance_ might be replaced,
the corresponding class still has the original `forward`.
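As a minimal sketch of why this is reversible (illustrative only, not the actual layer-replacement machinery; `fast_relu` is a hypothetical stand-in for a hub kernel):

```python
import types

import torch
import torch.nn as nn


def fast_relu(x: torch.Tensor) -> torch.Tensor:
    # Hypothetical stand-in for an optimized hub kernel.
    return torch.relu(x)


layer = nn.ReLU()

# Replace `forward` on this *instance* only.
layer.forward = types.MethodType(lambda self, x: fast_relu(x), layer)

# The class still carries the original `forward`, so the swap is reversible.
assert nn.ReLU.forward is not layer.forward.__func__
del layer.forward  # the instance falls back to the class method again
assert layer.forward.__func__ is nn.ReLU.forward
```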
## Misc
### How can I disable kernel reporting in the user-agent?
By default, we collect telemetry whenever `get_kernel()` is called.
This only includes the `kernels` version, `torch` version, and the build
information for the kernel being requested.
You can disable this by setting `export DISABLE_TELEMETRY=yes`.
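For example (a sketch; `kernels-community/activation` is just an illustrative repo id):

```python
import os

from kernels import get_kernel

# Any of "1", "on", "yes" or "true" (case-insensitive) disables telemetry;
# it only needs to be set before `get_kernel()` is called.
os.environ["DISABLE_TELEMETRY"] = "yes"

activation = get_kernel("kernels-community/activation")
```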

View File

@@ -46,6 +46,16 @@ have dynamic library dependencies outside:
- Torch;
- CUDA/ROCm libraries installed as dependencies of Torch.
## Compatibility with torch.compile
The Kernel Hub also encourages writing kernels in a `torch.compile`-compliant
way. This helps ensure that kernels are compatible with `torch.compile` without
introducing graph breaks or triggering recompilation, which can limit the
benefits of compilation.
[Here](https://github.com/huggingface/kernel-builder/blob/d1ee9bf9301ac8c5199099d90ee1c9d5c789d5ba/examples/relu-backprop-compile/tests/test_relu.py#L162) is a simple test that checks for graph breaks and
recompilation triggers during `torch.compile`.
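A minimal sketch of such a check, using `fullgraph=True` so that any graph break raises immediately (the hub kernel call is replaced here by a plain PyTorch op for illustration):

```python
import torch


def apply_kernel(x: torch.Tensor) -> torch.Tensor:
    # Stand-in for a call into a hub kernel.
    return torch.nn.functional.silu(x)


# `fullgraph=True` makes torch.compile error out on any graph break.
compiled = torch.compile(apply_kernel, fullgraph=True)
x = torch.randn(8, 16)
compiled(x)

# The linked test additionally asserts that repeated calls with the same
# shapes do not trigger recompilation.
```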
### Linux
- Use [ABI3/Limited API](https://docs.python.org/3/c-api/stable.html#stable-application-binary-interface)

View File

@@ -1,6 +1,6 @@
[project]
name = "kernels"
version = "0.10.2.dev0"
version = "0.10.4.dev0"
description = "Download compute kernels"
authors = [
{ name = "OlivierDehaene", email = "olivier@huggingface.co" },

View File

@@ -1,18 +1,19 @@
from pathlib import Path
import sys
from pathlib import Path
from huggingface_hub import snapshot_download
from kernels.utils import CACHE_DIR
from kernel_abi_check import (
BinaryFormat,
IncompatibleMacOSVersion,
ObjectFile,
IncompatibleAbi3Symbol,
NonAbi3Symbol,
IncompatibleMacOSVersion,
IncompatibleManylinuxSymbol,
MissingMacOSVersion,
NonAbi3Symbol,
ObjectFile,
)
from kernels.utils import CACHE_DIR
def check_kernel(
*, macos: str, manylinux: str, python_abi: str, repo_id: str, revision: str

View File

@@ -4,7 +4,7 @@ import json
import sys
from pathlib import Path
from huggingface_hub import create_repo, upload_folder
from huggingface_hub import create_repo, upload_folder, create_branch
from kernels.compat import tomllib
from kernels.lockfile import KernelLock, get_kernel_locks
@@ -69,6 +69,11 @@ def main():
type=str,
help="Repository ID to use to upload to the Hugging Face Hub",
)
upload_parser.add_argument(
"--branch",
type=None,
help="If set, the upload will be made to a particular branch of the provided `repo_id`.",
)
upload_parser.add_argument(
"--private",
action="store_true",
@@ -199,6 +204,7 @@ def lock_kernels(args):
def upload_kernels(args):
# Resolve `kernel_dir` to be uploaded.
kernel_dir = Path(args.kernel_dir).resolve()
build_dir = kernel_dir / "build"
if not kernel_dir.is_dir():
@@ -210,6 +216,9 @@ def upload_kernels(args):
repo_id=args.repo_id, private=args.private, exist_ok=True
).repo_id
if args.branch is not None:
create_branch(repo_id=repo_id, branch=args.branch, exist_ok=True)
delete_patterns: set[str] = set()
for build_variant in build_dir.iterdir():
if build_variant.is_dir():
@@ -218,6 +227,7 @@
upload_folder(
repo_id=repo_id,
folder_path=build_dir,
revision=args.branch,
path_in_repo="build",
delete_patterns=list(delete_patterns),
commit_message="Build uploaded using `kernels`.",
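With this change a build can be pushed to a branch via `kernels upload <dir> --repo_id=... --branch=...`; roughly equivalently, here is a sketch that drives the same code path directly (the repo id and branch name are placeholders):

```python
from types import SimpleNamespace

from kernels.cli import upload_kernels

# upload_kernels() only reads these attributes from the parsed CLI arguments.
args = SimpleNamespace(
    kernel_dir="./my-kernel",          # must contain a build/ subdirectory
    repo_id="hub-username/my-kernel",  # placeholder repository id
    private=False,
    branch="my-branch",                # created with exist_ok=True if missing
)
upload_kernels(args)
```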

View File

@@ -11,7 +11,7 @@ import sys
from importlib.metadata import Distribution
from pathlib import Path
from types import ModuleType
from typing import Dict, List, Optional, Tuple
from typing import Dict, List, Optional, Tuple, Union
from huggingface_hub import file_exists, snapshot_download
from packaging.version import parse
@@ -19,6 +19,8 @@ from packaging.version import parse
from kernels._versions import select_revision_or_version
from kernels.lockfile import KernelLock, VariantLock
ENV_VARS_TRUE_VALUES = {"1", "ON", "YES", "TRUE"}
def _get_cache_dir() -> Optional[str]:
"""Returns the kernels cache directory."""
@@ -54,7 +56,7 @@ def build_variant() -> str:
compute_framework = f"rocm{rocm_version.major}{rocm_version.minor}"
elif torch.backends.mps.is_available():
compute_framework = "metal"
elif torch.version.xpu is not None:
elif hasattr(torch.version, "xpu") and torch.version.xpu is not None:
version = torch.version.xpu
compute_framework = f"xpu{version[0:4]}{version[5:6]}"
elif _get_privateuse_backend_name() == "npu":
@@ -108,6 +110,7 @@
revision: str,
local_files_only: bool = False,
variant_locks: Optional[Dict[str, VariantLock]] = None,
user_agent: Optional[Union[str, dict]] = None,
) -> Tuple[str, Path]:
"""
Download a kernel for the current environment to the cache.
@@ -123,6 +126,8 @@
Whether to only use local files and not download from the Hub.
variant_locks (`Dict[str, VariantLock]`, *optional*):
Optional dictionary of variant locks for validation.
user_agent (`Union[str, dict]`, *optional*):
The `user_agent` info to pass to `snapshot_download()` for internal telemetry.
Returns:
`Tuple[str, Path]`: A tuple containing the package name and the path to the variant directory.
@@ -130,6 +135,7 @@
package_name = package_name_from_repo_id(repo_id)
variant = build_variant()
universal_variant = universal_build_variant()
user_agent = _get_user_agent(user_agent=user_agent)
repo_path = Path(
snapshot_download(
repo_id,
@@ -137,6 +143,7 @@
cache_dir=CACHE_DIR,
revision=revision,
local_files_only=local_files_only,
user_agent=user_agent,
)
)
@@ -213,7 +220,10 @@
def get_kernel(
repo_id: str, revision: Optional[str] = None, version: Optional[str] = None
repo_id: str,
revision: Optional[str] = None,
version: Optional[str] = None,
user_agent: Optional[Union[str, dict]] = None,
) -> ModuleType:
"""
Load a kernel from the kernel hub.
@@ -229,6 +239,8 @@
version (`str`, *optional*):
The kernel version to download. This can be a Python version specifier, such as `">=1.0.0,<2.0.0"`.
Cannot be used together with `revision`.
user_agent (`Union[str, dict]`, *optional*):
The `user_agent` info to pass to `snapshot_download()` for internal telemetry.
Returns:
`ModuleType`: The imported kernel module.
@@ -245,7 +257,9 @@
```
"""
revision = select_revision_or_version(repo_id, revision, version)
package_name, package_path = install_kernel(repo_id, revision=revision)
package_name, package_path = install_kernel(
repo_id, revision=revision, user_agent=user_agent
)
return import_from_path(package_name, package_path / package_name / "__init__.py")
@@ -501,3 +515,24 @@ def git_hash_object(data: bytes, object_type: str = "blob"):
def package_name_from_repo_id(repo_id: str) -> str:
return repo_id.split("/")[-1].replace("-", "_")
def _get_user_agent(
user_agent: Optional[Union[dict, str]] = None,
) -> Union[None, dict, str]:
import torch
from . import __version__
if os.getenv("DISABLE_TELEMETRY", "false").upper() in ENV_VARS_TRUE_VALUES:
return None
if user_agent is None:
user_agent = {
"kernels": __version__,
"torch": torch.__version__,
"build_variant": build_variant(),
"file_type": "kernel",
}
return user_agent
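Callers can also attach their own identifier; when `user_agent` is provided it is passed through unchanged. A sketch, with a made-up application name:

```python
from kernels import get_kernel

# The custom value is forwarded as-is to snapshot_download(); it replaces the
# default {"kernels": ..., "torch": ..., ...} payload and is still suppressed
# entirely when DISABLE_TELEMETRY is set.
activation = get_kernel(
    "kernels-community/activation",
    user_agent={"my-app": "0.1.0"},
)
```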

View File

@@ -1,82 +1,70 @@
[
{
"repo_id": "kernels-community/activation",
"sha": "fd6842e88f1f23f198551d78a4541b8eb07e0538",
"sha": "83046852be158d525114f68513cd79fd88911b37",
"variants": {
"torch25-cxx11-cu118-x86_64-linux": {
"hash": "sha256-61e3e51b5b59b30d4a6ba943a5e6e4ef5a9c8260cc4bca40b9fb462c0777842b",
"hash_type": "git_lfs_concat"
},
"torch25-cxx11-cu121-x86_64-linux": {
"hash": "sha256-baa6b872040730bd1d676c011381f6f626fb96189837b828f587c806af8994fa",
"hash_type": "git_lfs_concat"
},
"torch25-cxx11-cu124-x86_64-linux": {
"hash": "sha256-c1ec7457847fa1f0e4ab43234dfc3cd0959977e03dc2ffe89b4f6b90970c7965",
"hash_type": "git_lfs_concat"
},
"torch25-cxx98-cu118-x86_64-linux": {
"hash": "sha256-412f9c841f20741e42f2c6cdb8c7da0e33ab436b219975acffe18b62b97ecd7c",
"hash_type": "git_lfs_concat"
},
"torch25-cxx98-cu121-x86_64-linux": {
"hash": "sha256-2fde7f97859506e000c1072b3916c0a75bc8cee750a9853ea8b68199e7b57bcd",
"hash_type": "git_lfs_concat"
},
"torch25-cxx98-cu124-x86_64-linux": {
"hash": "sha256-93309986f39a64a5630378108154866f0545178fa8dfef9b8f8ccfef9a78608e",
"hash_type": "git_lfs_concat"
},
"torch26-cxx11-cu118-x86_64-linux": {
"hash": "sha256-3284d3c64b76d92c1ee930bce8013aff307f16eefb16c2d5dea9f2ca70e71e1f",
"hash_type": "git_lfs_concat"
},
"torch26-cxx11-cu124-x86_64-linux": {
"hash": "sha256-36a8c93773c08ddf8ef624a8a6b2866be26d1861450dfe1ecac0bed59f9ffa47",
"hash_type": "git_lfs_concat"
},
"torch26-cxx11-cu126-aarch64-linux": {
"hash": "sha256-f5afb734520f587717665659798ff738a69e5ae1e34d4bd95624edd18fb165cd",
"hash_type": "git_lfs_concat"
},
"torch26-cxx11-cu126-x86_64-linux": {
"hash": "sha256-940841a7cb44f76c9a896d8b39f5bc0e0420f1c4c05ae9423da96778de4d1f2c",
"hash_type": "git_lfs_concat"
},
"torch26-cxx98-cu118-x86_64-linux": {
"hash": "sha256-8e0f907830c3acc8c6bebfc162c744012ff6973e8110d7bf8ecd74b492418204",
"hash_type": "git_lfs_concat"
},
"torch26-cxx98-cu124-x86_64-linux": {
"hash": "sha256-0833414cbe658baec55b7ff63537cddccc973fe99e3c03008cced5e66e38b6c1",
"hash_type": "git_lfs_concat"
},
"torch26-cxx98-cu126-aarch64-linux": {
"hash": "sha256-d94fa59a13a5b623b2071aadcd1e6c8477c4d557fd06ad144f15b46b1fc71aab",
"hash_type": "git_lfs_concat"
},
"torch26-cxx98-cu126-x86_64-linux": {
"hash": "sha256-64784f5f2f9e232d0f2fd824fbc47eadde505e3c232f351bead5b04c429c65c2",
"hash_type": "git_lfs_concat"
},
"torch27-cxx11-cu118-x86_64-linux": {
"hash": "sha256-bcba3765f061649bac0e5a9159bea8349ced4780e24a2330aa62ce0f8d3a9d78",
"hash_type": "git_lfs_concat"
},
"torch27-cxx11-cu126-aarch64-linux": {
"hash": "sha256-e4625df5706af025c70bd824d952b928d9a2965eeaefda72fc47be0fae680c5e",
"hash": "sha256-e34965c814c4c092fcb634ebadefe82ea9a05b98343f8ebdefa7305dcc05359e",
"hash_type": "git_lfs_concat"
},
"torch27-cxx11-cu126-x86_64-linux": {
"hash": "sha256-7d7d3e655f34a7b03d5603d7c1ab723ef3efc823291762421a8b3a4aa51bd405",
"hash": "sha256-5f92b35922b37224a416398a39a29b7e5f1aca1df17d5c69f1b9e9cdb7033561",
"hash_type": "git_lfs_concat"
},
"torch27-cxx11-cu128-aarch64-linux": {
"hash": "sha256-60e076194dcd55b32c5aca72f09816cba0fff52f340c8a063b17ff0577154d99",
"hash": "sha256-125967cb23bacd2cec443799f184ac08247dfff33f5027e54ee16d3779ca5986",
"hash_type": "git_lfs_concat"
},
"torch27-cxx11-cu128-x86_64-linux": {
"hash": "sha256-f0a3802382efdcd78b40601187a9c416579a24ef2ed5a60d2296ef0951a89597",
"hash": "sha256-496a84c99d7035a1b6f0ea1c026b751c3a2677956f4c1be546d3cc1505a5fdbb",
"hash_type": "git_lfs_concat"
},
"torch28-cxx11-cu126-aarch64-linux": {
"hash": "sha256-f0775a30ffa290c90aba3a41037e3ca91edb15b4a9367561fafd5f25455e117a",
"hash_type": "git_lfs_concat"
},
"torch28-cxx11-cu126-x86_64-linux": {
"hash": "sha256-081995e6230f306bdf6111186618794f2411cf0ffd9b4800330df60b4ebe1927",
"hash_type": "git_lfs_concat"
},
"torch28-cxx11-cu128-aarch64-linux": {
"hash": "sha256-b937fef62a0c1cd71ab98490b651c473577af209b9a3e2a6b452350283d8812c",
"hash_type": "git_lfs_concat"
},
"torch28-cxx11-cu128-x86_64-linux": {
"hash": "sha256-a3915686cc58641a3361ece63ab77b33e9d30315dea12547e4bda008d8810a01",
"hash_type": "git_lfs_concat"
},
"torch28-cxx11-cu129-aarch64-linux": {
"hash": "sha256-a24dca8e998f88be42491921c9df89d88a6112ca630acd2efc2dd34a64b91fcb",
"hash_type": "git_lfs_concat"
},
"torch28-cxx11-cu129-x86_64-linux": {
"hash": "sha256-df6c70a70f425db2f68b86561c6f93c5675c1d5e5d058766d88ab17472229907",
"hash_type": "git_lfs_concat"
},
"torch29-cxx11-cu126-aarch64-linux": {
"hash": "sha256-c120011c201072b4cfd70c2ba2d45c2f05337feaf604ddec3c6c4987def33ab3",
"hash_type": "git_lfs_concat"
},
"torch29-cxx11-cu126-x86_64-linux": {
"hash": "sha256-765a7f3279009979be4001a23c5c70e5e6ab9553098d67886731a5275a6d4b32",
"hash_type": "git_lfs_concat"
},
"torch29-cxx11-cu128-aarch64-linux": {
"hash": "sha256-266d057a9cd82b872a0e02f09ac5e2660fcffcf9a7b7fa1fa8ff33dc19c0f5c2",
"hash_type": "git_lfs_concat"
},
"torch29-cxx11-cu128-x86_64-linux": {
"hash": "sha256-6850e594ba4588f289b5904eb88eda5a41870ee20a3bf1586f3268307caf4b53",
"hash_type": "git_lfs_concat"
},
"torch29-cxx11-cu130-aarch64-linux": {
"hash": "sha256-23741b935462b53bdf868f8d1c9c8cff5f02f71ea3b0550df41dc8b030b0b474",
"hash_type": "git_lfs_concat"
},
"torch29-cxx11-cu130-x86_64-linux": {
"hash": "sha256-b884ae792dc1eada071f31645add0c2c76d479864f25aebcdd8318b675aaaf29",
"hash_type": "git_lfs_concat"
}
}

View File

@@ -7,7 +7,7 @@ from pathlib import Path
from typing import List
import pytest
from huggingface_hub import delete_repo, model_info
from huggingface_hub import delete_repo, model_info, list_repo_refs
from kernels.cli import upload_kernels
@@ -30,6 +30,7 @@ class UploadArgs:
kernel_dir: None
repo_id: None
private: False
branch: None
def next_filename(path: Path) -> Path:
@@ -70,17 +71,23 @@ def get_filenames_from_a_repo(repo_id: str) -> List[str]:
@pytest.mark.token
@pytest.mark.is_staging_test
def test_kernel_upload_works_as_expected():
@pytest.mark.parametrize("branch", (None, "foo"))
def test_kernel_upload_works_as_expected(branch):
with tempfile.TemporaryDirectory() as tmpdir:
path = f"{tmpdir}/build/torch-universal/upload_test"
build_dir = Path(path)
build_dir.mkdir(parents=True, exist_ok=True)
script_path = build_dir / "foo.py"
script_path.write_text(PY_CONTENT)
upload_kernels(UploadArgs(tmpdir, REPO_ID, False))
upload_kernels(UploadArgs(tmpdir, REPO_ID, False, branch))
repo_filenames = get_filenames_from_a_repo(REPO_ID)
assert any(str(script_path.name) in f for f in repo_filenames)
if branch is not None:
refs = list_repo_refs(repo_id=REPO_ID)
assert any(ref_branch.name == branch for ref_branch in refs.branches)
delete_repo(repo_id=REPO_ID)
@@ -93,7 +100,7 @@ def test_kernel_upload_deletes_as_expected():
build_dir.mkdir(parents=True, exist_ok=True)
script_path = build_dir / "foo_2025.py"
script_path.write_text(PY_CONTENT)
upload_kernels(UploadArgs(tmpdir, REPO_ID, False))
upload_kernels(UploadArgs(tmpdir, REPO_ID, False, None))
repo_filenames = get_filenames_from_a_repo(REPO_ID)
filename_to_change = get_filename_to_change(repo_filenames)
@@ -105,7 +112,7 @@ def test_kernel_upload_deletes_as_expected():
changed_filename = next_filename(Path(filename_to_change))
script_path = build_dir / changed_filename
script_path.write_text(PY_CONTENT)
upload_kernels(UploadArgs(tmpdir, REPO_ID, False))
upload_kernels(UploadArgs(tmpdir, REPO_ID, False, None))
repo_filenames = get_filenames_from_a_repo(REPO_ID)
assert any(str(changed_filename) in k for k in repo_filenames), f"{repo_filenames=}"