-
Notifications
You must be signed in to change notification settings - Fork 183
Expand file tree
/
Copy pathpytorch-screen.py
More file actions
45 lines (37 loc) · 1.58 KB
/
pytorch-screen.py
File metadata and controls
45 lines (37 loc) · 1.58 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0
import torch

# If `rich` is installed, route output through a Console that forces ANSI
# color/terminal handling even when stdout is redirected (e.g. under a
# pipeline or container log collector). Deliberately shadows the builtin
# `print` so every line below picks up the rich renderer; falls back to the
# plain builtin when rich is absent.
try:
    from rich.console import Console
    print = Console(force_terminal=True, force_jupyter=False).out
except ModuleNotFoundError:
    pass

# --- CUDA runtime and build configuration ---------------------------------
print(f"{torch.cuda.is_available()=}")
print(f"{torch.version.cuda=}")
print(f"{torch.backends.cuda.is_built()=}")
print(f"{torch.backends.cuda.matmul.allow_tf32=}")
print(f"{torch.backends.cuda.matmul.allow_fp16_reduced_precision_reduction=}")
print(f"{torch.backends.cuda.cufft_plan_cache=}")
print(f"{torch.backends.cuda.preferred_linalg_library(backend=None)=}")
print(f"{torch.backends.cuda.flash_sdp_enabled()=}")
print(f"{torch.backends.cuda.math_sdp_enabled()=}")

# --- cuDNN ----------------------------------------------------------------
# cudnn.version() returns None (rather than raising) when cuDNN is missing,
# so these are safe to call unconditionally.
print(f"{torch.backends.cudnn.version()=}")
print(f"{torch.backends.cudnn.is_available()=}")
print(f"{torch.backends.cudnn.enabled=}")
print(f"{torch.backends.cudnn.allow_tf32=}")
print(f"{torch.backends.cudnn.deterministic=}")
print(f"{torch.backends.cudnn.benchmark=}")
print(f"{torch.backends.cudnn.benchmark_limit=}")

# --- CPU math backends ----------------------------------------------------
print(f"{torch.backends.mkl.is_available()=}")
print(f"{torch.backends.mkldnn.is_available()=}")
print(f"{torch.backends.openmp.is_available()=}")

# --- opt_einsum -----------------------------------------------------------
# torch.backends.opt_einsum only exists on newer PyTorch releases; skip the
# whole section on older versions instead of crashing the diagnostic.
try:
    print(f"{torch.backends.opt_einsum.is_available()=}")
    print(f"{torch.backends.opt_einsum.get_opt_einsum()=}")
    print(f"{torch.backends.opt_einsum.enabled=}")
    print(f"{torch.backends.opt_einsum.strategy=}")
except AttributeError:
    pass

# --- Distributed ----------------------------------------------------------
# The distributed package is not compiled into every wheel; only probe the
# per-backend helpers when it is, so the script still completes (and reports
# the absence) on builds without it.
print(f"{torch.distributed.is_available()=}")
if torch.distributed.is_available():
    print(f"{torch.distributed.is_mpi_available()=}")
    print(f"{torch.distributed.is_nccl_available()=}")