Using the GPU for inference

First, check whether PyTorch can see a CUDA-capable GPU on the current machine:

```python
import torch

if torch.cuda.is_available():
    print('PyTorch can use GPU on current machine!')
```
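A common variant, not shown in the snippet above, is to select the device once up front and fall back to the CPU when no GPU is present; this keeps the rest of the code device-agnostic:

```python
import torch

# Prefer the GPU when present, otherwise fall back to the CPU.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
print(f"Running on: {device}")
```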

Next, build the model, load the trained weights, switch to evaluation mode, and move the model to the GPU if one is available:

```python
model = MyModel(*args, **kwargs)
model.load_state_dict(torch.load(your_model_file_path))
model.eval()  # switch to evaluation mode (affects layers such as dropout and batch norm)
if torch.cuda.is_available():
    print('PyTorch can use GPU on current machine!')
    device = torch.device("cuda")
    model.to(device)
```
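One pitfall worth noting: `torch.load` restores tensors onto the device they were saved from, so a checkpoint saved on a GPU fails to load on a CPU-only machine unless it is remapped. A minimal sketch, assuming a hypothetical checkpoint path `model.pth`:

```python
import torch

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# map_location remaps every tensor in the checkpoint onto the chosen device,
# so a GPU-saved checkpoint also loads cleanly on a CPU-only machine.
state_dict = torch.load("model.pth", map_location=device)  # hypothetical path
```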

Finally, move the input tensor to the same device as the model and run inference:

```python
if torch.cuda.is_available():  # GPU available
    model_input_tensor = model_input_tensor.to(torch.device('cuda'))
model_output = model(model_input_tensor)  # inference
```
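Putting the steps together, here is a minimal end-to-end sketch. The two-layer `MyModel`, the input shape, and the commented-out checkpoint path are hypothetical placeholders; wrapping the forward pass in `torch.no_grad()` is the standard PyTorch idiom for skipping gradient bookkeeping during inference.

```python
import torch
import torch.nn as nn

# Hypothetical stand-in for your own architecture.
class MyModel(nn.Module):
    def __init__(self):
        super().__init__()
        self.fc = nn.Linear(10, 2)

    def forward(self, x):
        return self.fc(x)

# Pick the device once and reuse it for the model and its inputs.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

model = MyModel()
# model.load_state_dict(torch.load("model.pth", map_location=device))  # hypothetical checkpoint
model.eval()   # evaluation mode: disables dropout, freezes batch-norm statistics
model.to(device)

# Dummy input; in practice this would be your preprocessed data.
model_input_tensor = torch.randn(1, 10, device=device)

# torch.no_grad() skips gradient tracking, saving memory and time at inference.
with torch.no_grad():
    model_output = model(model_input_tensor)

print(model_output.shape, model_output.device)
```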

Reference:
https://www.codelast.com/%E5%8E%9F%E5%88%9B-pytorch%E5%81%9Ainference-prediction%E7%9A%84%E6%97%B6%E5%80%99%E5%A6%82%E4%BD%95%E4%BD%BF%E7%94%A8gpu/