
config.py (2.2 KB)

  1. """Configuration."""
  2. class Config:
  3. """Variables Configuration Class."""
  4. version = "v1.2.5"
  5. checkpoints_version = "v0.0.1"
  6. checkpoints_cdn = "https://link.dreamnet.tech/ipns/Qman5Qzv6YCPW9A3bw2wSgxCkus9RAxySKNNeHeM9LzfFs/Projects/Checkpoints/Releases/{}.zip"
  7. # experiment specifics
  8. norm = "batch" # instance normalization or batch normalization
  9. use_dropout = False # use dropout for the generator
  10. data_type = 32 # Supported data type i.e. 8, 16, 32 bit
  11. # input/output sizes
  12. batch_size = 1 # input batch size
  13. input_nc = 3 # of input image channels
  14. output_nc = 3 # of output image channels
  15. # for setting inputs
  16. # if true, takes images in order to make batches, otherwise takes them randomly
  17. serial_batches = True
  18. n_threads = (
  19. 0
  20. ) # threads for loading data. Keep this value at 0! see: https://github.com/pytorch/pytorch/issues/12831
  21. # Maximum number of samples allowed per dataset. If the dataset directory contains more than max_dataset_size,
  22. # only a subset is loaded.
  23. max_dataset_size = 1
  24. # for generator
  25. net_g = "global" # selects model to use for net_g
  26. ngf = 64 # of gen filters in first conv layer
  27. n_downsample_global = 4 # number of downsampling layers in net_g
  28. n_blocks_global = (
  29. 9
  30. ) # number of residual blocks in the global generator network
  31. n_blocks_local = (
  32. 0
  33. ) # number of residual blocks in the local enhancer network
  34. n_local_enhancers = 0 # number of local enhancers to use
  35. # number of epochs that we only train the outmost local enhancer
  36. niter_fix_global = 0
  37. # Image requirement
  38. desired_size = 512
  39. desired_shape = 512, 512, 3
  40. # Argparser dict
  41. args = {}
  42. # Log
  43. log = None
  44. # Multiprocessing
  45. @staticmethod
  46. def multiprocessing():
  47. """
  48. Return multiprocessing status.
  49. :return: <boolean> True is multiprocessing can be use
  50. """
  51. return Config.args['n_cores'] > 1
  52. @staticmethod
  53. def cuda_multiprocessing():
  54. """
  55. Return multiprocessing status.
  56. :return: <boolean> True is multiprocessing can be use
  57. """
  58. return Config.args['gpu_ids'] is None and Config.args['n_cores'] > 1
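
Both static helpers read from Config.args, which the application is expected to populate from its command-line parser before they are called; they will raise KeyError otherwise. Below is a minimal usage sketch, assuming a hypothetical argparse entry point with --n-cores and --gpu-ids flags (these names are not defined in this file):

    import argparse

    from config import Config

    # Hypothetical CLI wiring; the real project may define different flags.
    parser = argparse.ArgumentParser()
    parser.add_argument("--n-cores", type=int, default=1)
    parser.add_argument("--gpu-ids", default=None)  # e.g. "0,1", or None for CPU-only
    opts = parser.parse_args()

    # Config.args is a plain dict keyed by the names the helpers expect.
    Config.args = {"n_cores": opts.n_cores, "gpu_ids": opts.gpu_ids}

    if Config.cuda_multiprocessing():
        print("CPU-only run with multiple cores: workers can be spawned.")
    elif Config.multiprocessing():
        print("Multiple cores available, but GPUs are selected.")

    # The checkpoint archive URL is built from the CDN template and the pinned version.
    print(Config.checkpoints_cdn.format(Config.checkpoints_version))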