forked from lance-format/lance
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathconftest.py
More file actions
59 lines (46 loc) · 1.9 KB
/
conftest.py
File metadata and controls
59 lines (46 loc) · 1.9 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
# SPDX-License-Identifier: Apache-2.0
# SPDX-FileCopyrightText: Copyright The Lance Authors
"""
pytest configurations for benchmarks.
For configuration that is shared between tests and benchmarks, see ../conftest.py
"""
from pathlib import Path
import pytest
@pytest.fixture(scope="session")
def data_dir():
    """Session-scoped path to the benchmark data directory.

    Test datasets live under this directory so they can be cached and
    reused between benchmark runs.
    """
    repo_root = Path(__file__).parent.parent.parent
    return repo_root / "benchmark_data"
def disable_items_with_mark(items, mark, reason):
    """Attach a skip marker (with *reason*) to every collected item carrying *mark*."""
    skip_marker = pytest.mark.skip(reason=reason)
    marked_items = (item for item in items if mark in item.keywords)
    for item in marked_items:
        item.add_marker(skip_marker)
# These are initialization hooks and must have an exact name for pytest to pick them up
# https://docs.pytest.org/en/7.1.x/reference/reference.html
def pytest_collection_modifyitems(config, items):
    """Skip GPU-dependent benchmarks when the required hardware is unavailable.

    Items marked ``cuda`` are skipped when CUDA is unusable, items marked
    ``gpu`` when Apple MPS is unusable, and all of ``torch``/``cuda``/``gpu``
    when torch itself is not importable.

    Parameters
    ----------
    config : pytest config object (unused, required by the hook signature)
    items : list of collected test items; skip markers are added in place
    """
    try:
        import torch

        # torch.cuda.is_available will return True on some CI machines even though
        # any attempt to use CUDA will then fail. torch.cuda.device_count seems to
        # be more reliable.
        # BUG FIX: the original wrote `not torch.cuda.is_available` without calling
        # it — a function object is always truthy, so that clause was always False
        # and the check silently degraded to `device_count() <= 0` alone. The
        # parentheses below also make the original operator precedence
        # (`A and not B` binding before `or C`) explicit.
        cuda_unusable = (
            torch.backends.cuda.is_built() and not torch.cuda.is_available()
        ) or torch.cuda.device_count() <= 0
        if cuda_unusable:
            disable_items_with_mark(
                items, "cuda", "torch is installed but cuda is not available"
            )
        # MPS requires both a built backend and available hardware.
        if not torch.backends.mps.is_available() or not torch.backends.mps.is_built():
            disable_items_with_mark(
                items, "gpu", "torch is installed but no gpu is available"
            )
    except ImportError as err:
        # torch missing entirely: everything GPU/torch-related must be skipped.
        reason = f"torch not installed ({err})"
        disable_items_with_mark(items, "torch", reason)
        disable_items_with_mark(items, "cuda", reason)
        disable_items_with_mark(items, "gpu", reason)