File size: 389 Bytes
7bd11ed
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
from typing import List

import torch


def torch_device():
    """Pick the best available torch device string.

    Preference order: CUDA (with the current device index), then Apple
    MPS, then CPU.

    Returns:
        str: ``"cuda:<idx>"``, ``"mps"``, or ``"cpu"``.
    """
    if torch.cuda.is_available():
        return f"cuda:{torch.cuda.current_device()}"
    if torch.backends.mps.is_available():
        return "mps"
    return "cpu"


def split(iterable: List, chunk_size: int):
    """Yield successive chunks of *iterable* of length ``chunk_size``.

    The final chunk may be shorter when ``len(iterable)`` is not a
    multiple of ``chunk_size``.

    Args:
        iterable: A sequence supporting ``len`` and slicing.
        chunk_size: Maximum number of items per chunk.

    Yields:
        Consecutive slices of the input sequence.
    """
    start = 0
    total = len(iterable)
    while start < total:
        yield iterable[start:start + chunk_size]
        start += chunk_size