# Using the GPU for Inference
First, check that PyTorch can actually see a CUDA-capable GPU:

```python
import torch

# Report whether a CUDA-capable GPU is visible to PyTorch
if torch.cuda.is_available():
    print('PyTorch can use GPU on current machine!')
```
Next, rebuild the model, load the trained weights, and move the model to the GPU when one is available:

```python
# Reconstruct the model and load its trained weights
model = MyModel(*args, **kwargs)
model.load_state_dict(torch.load(your_model_file_path))
model.eval()  # set to evaluation mode (disables dropout, uses running batch-norm stats)

# Pick the GPU if PyTorch can use one, otherwise fall back to the CPU
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
model.to(device)
```
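If the checkpoint was saved on a GPU machine but may be loaded where only the CPU is available, `torch.load` accepts a `map_location` argument that remaps the stored tensors; a minimal sketch, reusing the `device` chosen above:

```python
# Remap the saved tensors onto whatever device is actually available,
# even if the checkpoint was written from a GPU process
state_dict = torch.load(your_model_file_path, map_location=device)
model.load_state_dict(state_dict)
```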
Finally, move the input tensor to the same device as the model and run inference:

```python
# The input must live on the same device as the model's parameters
model_input_tensor = model_input_tensor.to(device)

with torch.no_grad():  # no gradients are needed at inference time
    model_output = model(model_input_tensor)
```
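Putting the pieces together, here is a minimal end-to-end sketch. `MyModel`, `args`/`kwargs`, and `your_model_file_path` are the placeholders from the snippets above, and the input shape `(1, 3, 224, 224)` is only an assumed example:

```python
import torch

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

# Placeholder model class and checkpoint path from the snippets above
model = MyModel(*args, **kwargs)
model.load_state_dict(torch.load(your_model_file_path, map_location=device))
model.to(device)
model.eval()

# Assumed example input: one 3-channel 224x224 image
model_input_tensor = torch.randn(1, 3, 224, 224).to(device)
with torch.no_grad():
    model_output = model(model_input_tensor)

# Move the result back to the CPU for post-processing
result = model_output.cpu()
```

Calling `.cpu()` on the output before post-processing matters because operations such as converting to a NumPy array require the tensor to live in host memory.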