You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

gpu_info.py 724B

12345678910111213141516171819202122232425262728293031323334
  1. """gpu-info logic."""
  2. import json as j
  3. from torch import cuda
  4. from config import Config as Conf
  5. def get_info():
  6. """
  7. Get gpu info.
  8. :return: <dict> gpu info
  9. """
  10. return {
  11. "has_cuda": cuda.is_available(),
  12. "devices": [] if not cuda.is_available() else [cuda.get_device_name(i) for i in range(cuda.device_count())],
  13. }
  14. def main(_):
  15. """
  16. Start gpu info main logic.
  17. :param _: None
  18. :return: None
  19. """
  20. info = get_info()
  21. if not Conf.args['json']:
  22. Conf.log.info("Has Cuda: {}".format(info["has_cuda"]))
  23. for (i, device) in enumerate(info["devices"]):
  24. Conf.log.info("GPU {}: {}".format(i, device))
  25. else:
  26. print(j.dumps(info))