-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy path: device.py
More file actions
56 lines (40 loc) · 1.35 KB
/
device.py
File metadata and controls
56 lines (40 loc) · 1.35 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
from typing import Optional, Union

import torch
def get_best_device() -> torch.device:
    """Return the optimal device based on platform and available hardware.

    Priority order: CUDA > XPU > MPS > CPU

    Returns:
        torch.device: The highest-priority device that is available on
        this machine.
    """
    if torch.cuda.is_available():
        return torch.device("cuda")
    # torch.xpu exists only on builds with Intel GPU support — guard with hasattr.
    if hasattr(torch, "xpu") and torch.xpu.is_available():
        return torch.device("xpu")
    # Guard MPS the same way: torch.backends.mps is absent on torch < 1.12,
    # where the unguarded attribute access would raise AttributeError.
    if hasattr(torch.backends, "mps") and torch.backends.mps.is_available():
        return torch.device("mps")
    return torch.device("cpu")
def get_best_dtype(device: Optional[Union[torch.device, str]] = None) -> torch.dtype:
    """Return the optimal dtype for the given device.

    Args:
        device: The target device, as a ``torch.device`` or a device
            string such as ``"cpu"`` or ``"cuda"``. If None, uses
            get_best_device().

    Returns:
        torch.dtype: bfloat16/float16 for GPU, float32 for CPU.
    """
    if device is None:
        device = get_best_device()
    # Normalize: torch.device() idempotently accepts both strings and
    # existing torch.device objects, so callers may pass either.
    device = torch.device(device)
    device_type = device.type

    if device_type == "cuda":
        # Prefer bfloat16 where the hardware supports it (wider exponent
        # range than float16, so fewer overflow issues).
        if torch.cuda.is_bf16_supported():
            return torch.bfloat16
        return torch.float16
    if device_type == "xpu":
        # is_bf16_supported may be missing or raise on some XPU builds;
        # fall back to float16 on any failure (best-effort probe).
        try:
            if (
                hasattr(torch.xpu, "is_bf16_supported")
                and torch.xpu.is_bf16_supported()
            ):
                return torch.bfloat16
        except Exception:
            pass
        return torch.float16
    if device_type == "mps":
        # MPS has no reliable bfloat16 support; use float16.
        return torch.float16
    return torch.float32