imagenet.py 8.3 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218
  1. import os
  2. import shutil
  3. import tempfile
  4. from contextlib import contextmanager
  5. from typing import Any, Dict, Iterator, List, Optional, Tuple
  6. import torch
  7. from .folder import ImageFolder
  8. from .utils import check_integrity, extract_archive, verify_str_arg
# (archive filename, md5 checksum) for each ImageNet 2012 archive the user
# must download manually; md5s are checked by _verify_archive() before use.
ARCHIVE_META = {
    "train": ("ILSVRC2012_img_train.tar", "1d675b47d978889d74fa0da5fadfb00e"),
    "val": ("ILSVRC2012_img_val.tar", "29b22e2961454d5413ddabcf34fc5622"),
    "devkit": ("ILSVRC2012_devkit_t12.tar.gz", "fa75699e90414af021442c21a62c3abf"),
}

# Name of the cached metadata file written into the dataset root by
# parse_devkit_archive(): a torch.save'd tuple (wnid_to_classes, val_wnids).
META_FILE = "meta.bin"
  15. class ImageNet(ImageFolder):
  16. """`ImageNet <http://image-net.org/>`_ 2012 Classification Dataset.
  17. .. note::
  18. Before using this class, it is required to download ImageNet 2012 dataset from
  19. `here <https://image-net.org/challenges/LSVRC/2012/2012-downloads.php>`_ and
  20. place the files ``ILSVRC2012_devkit_t12.tar.gz`` and ``ILSVRC2012_img_train.tar``
  21. or ``ILSVRC2012_img_val.tar`` based on ``split`` in the root directory.
  22. Args:
  23. root (string): Root directory of the ImageNet Dataset.
  24. split (string, optional): The dataset split, supports ``train``, or ``val``.
  25. transform (callable, optional): A function/transform that takes in an PIL image
  26. and returns a transformed version. E.g, ``transforms.RandomCrop``
  27. target_transform (callable, optional): A function/transform that takes in the
  28. target and transforms it.
  29. loader (callable, optional): A function to load an image given its path.
  30. Attributes:
  31. classes (list): List of the class name tuples.
  32. class_to_idx (dict): Dict with items (class_name, class_index).
  33. wnids (list): List of the WordNet IDs.
  34. wnid_to_idx (dict): Dict with items (wordnet_id, class_index).
  35. imgs (list): List of (image path, class_index) tuples
  36. targets (list): The class_index value for each image in the dataset
  37. """
  38. def __init__(self, root: str, split: str = "train", **kwargs: Any) -> None:
  39. root = self.root = os.path.expanduser(root)
  40. self.split = verify_str_arg(split, "split", ("train", "val"))
  41. self.parse_archives()
  42. wnid_to_classes = load_meta_file(self.root)[0]
  43. super().__init__(self.split_folder, **kwargs)
  44. self.root = root
  45. self.wnids = self.classes
  46. self.wnid_to_idx = self.class_to_idx
  47. self.classes = [wnid_to_classes[wnid] for wnid in self.wnids]
  48. self.class_to_idx = {cls: idx for idx, clss in enumerate(self.classes) for cls in clss}
  49. def parse_archives(self) -> None:
  50. if not check_integrity(os.path.join(self.root, META_FILE)):
  51. parse_devkit_archive(self.root)
  52. if not os.path.isdir(self.split_folder):
  53. if self.split == "train":
  54. parse_train_archive(self.root)
  55. elif self.split == "val":
  56. parse_val_archive(self.root)
  57. @property
  58. def split_folder(self) -> str:
  59. return os.path.join(self.root, self.split)
  60. def extra_repr(self) -> str:
  61. return "Split: {split}".format(**self.__dict__)
  62. def load_meta_file(root: str, file: Optional[str] = None) -> Tuple[Dict[str, str], List[str]]:
  63. if file is None:
  64. file = META_FILE
  65. file = os.path.join(root, file)
  66. if check_integrity(file):
  67. return torch.load(file)
  68. else:
  69. msg = (
  70. "The meta file {} is not present in the root directory or is corrupted. "
  71. "This file is automatically created by the ImageNet dataset."
  72. )
  73. raise RuntimeError(msg.format(file, root))
  74. def _verify_archive(root: str, file: str, md5: str) -> None:
  75. if not check_integrity(os.path.join(root, file), md5):
  76. msg = (
  77. "The archive {} is not present in the root directory or is corrupted. "
  78. "You need to download it externally and place it in {}."
  79. )
  80. raise RuntimeError(msg.format(file, root))
def parse_devkit_archive(root: str, file: Optional[str] = None) -> None:
    """Parse the devkit archive of the ImageNet2012 classification dataset and save
    the meta information in a binary file.

    Args:
        root (str): Root directory containing the devkit archive
        file (str, optional): Name of devkit archive. Defaults to
            'ILSVRC2012_devkit_t12.tar.gz'
    """
    # Imported lazily so scipy is only required when (re)building the meta file.
    import scipy.io as sio

    def parse_meta_mat(devkit_root: str) -> Tuple[Dict[int, str], Dict[str, Tuple[str, ...]]]:
        # meta.mat holds a 'synsets' struct array; column 4 is the number of
        # child synsets — leaves (num_children == 0) are the actual classes.
        metafile = os.path.join(devkit_root, "data", "meta.mat")
        meta = sio.loadmat(metafile, squeeze_me=True)["synsets"]
        nums_children = list(zip(*meta))[4]
        meta = [meta[idx] for idx, num_children in enumerate(nums_children) if num_children == 0]
        # First three columns: ILSVRC index, WordNet ID, comma-separated names.
        idcs, wnids, classes = list(zip(*meta))[:3]
        # e.g. "great white shark, white shark" -> ("great white shark", "white shark")
        classes = [tuple(clss.split(", ")) for clss in classes]
        idx_to_wnid = {idx: wnid for idx, wnid in zip(idcs, wnids)}
        wnid_to_classes = {wnid: clss for wnid, clss in zip(wnids, classes)}
        return idx_to_wnid, wnid_to_classes

    def parse_val_groundtruth_txt(devkit_root: str) -> List[int]:
        # One ILSVRC class index per line, ordered like the sorted val images.
        file = os.path.join(devkit_root, "data", "ILSVRC2012_validation_ground_truth.txt")
        with open(file) as txtfh:
            val_idcs = txtfh.readlines()
        return [int(val_idx) for val_idx in val_idcs]

    @contextmanager
    def get_tmp_dir() -> Iterator[str]:
        # Temporary extraction dir, always cleaned up even on error.
        tmp_dir = tempfile.mkdtemp()
        try:
            yield tmp_dir
        finally:
            shutil.rmtree(tmp_dir)

    archive_meta = ARCHIVE_META["devkit"]
    if file is None:
        file = archive_meta[0]
    md5 = archive_meta[1]

    _verify_archive(root, file, md5)

    with get_tmp_dir() as tmp_dir:
        extract_archive(os.path.join(root, file), tmp_dir)

        devkit_root = os.path.join(tmp_dir, "ILSVRC2012_devkit_t12")
        idx_to_wnid, wnid_to_classes = parse_meta_mat(devkit_root)
        val_idcs = parse_val_groundtruth_txt(devkit_root)
        # Translate per-image class indices into WordNet IDs before caching.
        val_wnids = [idx_to_wnid[idx] for idx in val_idcs]

        torch.save((wnid_to_classes, val_wnids), os.path.join(root, META_FILE))
  124. def parse_train_archive(root: str, file: Optional[str] = None, folder: str = "train") -> None:
  125. """Parse the train images archive of the ImageNet2012 classification dataset and
  126. prepare it for usage with the ImageNet dataset.
  127. Args:
  128. root (str): Root directory containing the train images archive
  129. file (str, optional): Name of train images archive. Defaults to
  130. 'ILSVRC2012_img_train.tar'
  131. folder (str, optional): Optional name for train images folder. Defaults to
  132. 'train'
  133. """
  134. archive_meta = ARCHIVE_META["train"]
  135. if file is None:
  136. file = archive_meta[0]
  137. md5 = archive_meta[1]
  138. _verify_archive(root, file, md5)
  139. train_root = os.path.join(root, folder)
  140. extract_archive(os.path.join(root, file), train_root)
  141. archives = [os.path.join(train_root, archive) for archive in os.listdir(train_root)]
  142. for archive in archives:
  143. extract_archive(archive, os.path.splitext(archive)[0], remove_finished=True)
  144. def parse_val_archive(
  145. root: str, file: Optional[str] = None, wnids: Optional[List[str]] = None, folder: str = "val"
  146. ) -> None:
  147. """Parse the validation images archive of the ImageNet2012 classification dataset
  148. and prepare it for usage with the ImageNet dataset.
  149. Args:
  150. root (str): Root directory containing the validation images archive
  151. file (str, optional): Name of validation images archive. Defaults to
  152. 'ILSVRC2012_img_val.tar'
  153. wnids (list, optional): List of WordNet IDs of the validation images. If None
  154. is given, the IDs are loaded from the meta file in the root directory
  155. folder (str, optional): Optional name for validation images folder. Defaults to
  156. 'val'
  157. """
  158. archive_meta = ARCHIVE_META["val"]
  159. if file is None:
  160. file = archive_meta[0]
  161. md5 = archive_meta[1]
  162. if wnids is None:
  163. wnids = load_meta_file(root)[1]
  164. _verify_archive(root, file, md5)
  165. val_root = os.path.join(root, folder)
  166. extract_archive(os.path.join(root, file), val_root)
  167. images = sorted(os.path.join(val_root, image) for image in os.listdir(val_root))
  168. for wnid in set(wnids):
  169. os.mkdir(os.path.join(val_root, wnid))
  170. for wnid, img_file in zip(wnids, images):
  171. shutil.move(img_file, os.path.join(val_root, wnid, os.path.basename(img_file)))