# train_bow.py
# Approach 3: Local features
# This script is used for generating a BOW vocabulary using
# densely sampled SIFT features on Lapse images.
# See eval_bow.py for evaluation.
  5. import argparse
  6. import os
  7. import numpy as np
  8. from timeit import default_timer as timer
  9. from datetime import timedelta
  10. from py.Dataset import Dataset
  11. from py.LocalFeatures import extract_descriptors, generate_dictionary_from_descriptors, generate_bow_features
  12. def main():
  13. parser = argparse.ArgumentParser(description="BOW train script")
  14. parser.add_argument("dataset_dir", type=str, help="Directory of the dataset containing all session folders")
  15. parser.add_argument("session_name", type=str, help="Name of the session to use for Lapse images (e.g. marten_01)")
  16. parser.add_argument("--clusters", type=int, help="Number of clusters / BOW vocabulary size", default=1024)
  17. parser.add_argument("--step_size", type=int, help="DSIFT keypoint step size. Smaller step size = more keypoints.", default=30)
  18. parser.add_argument("--keypoint_size", type=int, help="DSIFT keypoint size. Defaults to step_size.", default=-1)
  19. parser.add_argument("--include_motion", action="store_true", help="Include motion images for training.")
  20. args = parser.parse_args()
  21. if args.keypoint_size <= 0:
  22. args.keypoint_size = args.step_size
  23. print(f"Using keypoint size {args.keypoint_size} with step size {args.step_size}.")
  24. ds = Dataset(args.dataset_dir)
  25. session = ds.create_session(args.session_name)
  26. save_dir = f"./bow_train_NoBackup/{session.name}"
  27. suffix = "_motion" if args.include_motion else ""
  28. lapse_dscs_file = os.path.join(save_dir, f"lapse_dscs_{args.step_size}_{args.keypoint_size}.npy")
  29. motion_dscs_file = os.path.join(save_dir, f"motion_dscs_{args.step_size}_{args.keypoint_size}.npy")
  30. dictionary_file = os.path.join(save_dir, f"bow_dict_{args.step_size}_{args.keypoint_size}_{args.clusters}{suffix}.npy")
  31. train_feat_file = os.path.join(save_dir, f"bow_train_{args.step_size}_{args.keypoint_size}_{args.clusters}{suffix}.npy")
  32. # Lapse DSIFT descriptors
  33. if os.path.isfile(lapse_dscs_file):
  34. if os.path.isfile(dictionary_file):
  35. # if dictionary file already exists, we don't need the lapse descriptors
  36. print(f"{dictionary_file} already exists, skipping lapse descriptor extraction...")
  37. else:
  38. print(f"{lapse_dscs_file} already exists, loading lapse descriptors from file... ", end="")
  39. lapse_dscs = np.load(lapse_dscs_file)
  40. assert lapse_dscs.shape[-1] == 128
  41. lapse_dscs = lapse_dscs.reshape(-1, 128)
  42. print(f"Loaded {len(lapse_dscs)} lapse descriptors!")
  43. else:
  44. # Step 1 - extract dense SIFT descriptors
  45. print("Extracting lapse descriptors...")
  46. lapse_dscs = extract_descriptors(list(session.generate_lapse_images()), kp_step=args.step_size, kp_size=args.keypoint_size)
  47. os.makedirs(save_dir, exist_ok=True)
  48. np.save(lapse_dscs_file, lapse_dscs)
  49. # Motion DSIFT descriptors
  50. if args.include_motion:
  51. if os.path.isfile(motion_dscs_file):
  52. if os.path.isfile(dictionary_file):
  53. # if dictionary file already exists, we don't need the descriptors
  54. print(f"{dictionary_file} already exists, skipping motion descriptor extraction...")
  55. else:
  56. print(f"{motion_dscs_file} already exists, loading motion descriptors from file...", end="")
  57. motion_dscs = np.load(motion_dscs_file)
  58. assert motion_dscs.shape[-1] == 128
  59. motion_dscs = motion_dscs.reshape(-1, 128)
  60. print(f"Loaded {len(motion_dscs)} motion descriptors!")
  61. lapse_dscs = np.concatenate([lapse_dscs, motion_dscs])
  62. else:
  63. # Step 1b - extract dense SIFT descriptors from motion images
  64. print("Extracting motion descriptors...")
  65. motion_dscs = extract_descriptors(list(session.generate_motion_images()), kp_step=args.step_size, kp_size=args.keypoint_size)
  66. os.makedirs(save_dir, exist_ok=True)
  67. np.save(motion_dscs_file, motion_dscs)
  68. lapse_dscs = np.concatenate([lapse_dscs, motion_dscs])
  69. # BOW dictionary
  70. if os.path.isfile(dictionary_file):
  71. print(f"{dictionary_file} already exists, loading BOW dictionary from file...")
  72. dictionary = np.load(dictionary_file)
  73. else:
  74. # Step 2 - create BOW dictionary from Lapse SIFT descriptors
  75. print(f"Creating BOW vocabulary with {args.clusters} clusters from {len(lapse_dscs)} descriptors...")
  76. start_time = timer()
  77. dictionary = generate_dictionary_from_descriptors(lapse_dscs, args.clusters)
  78. end_time = timer()
  79. delta_time = timedelta(seconds=end_time-start_time)
  80. print(f"Clustering took {delta_time}.")
  81. np.save(dictionary_file, dictionary)
  82. # Extract Lapse BOW features using vocabulary (train data)
  83. if os.path.isfile(train_feat_file):
  84. print(f"{train_feat_file} already exists, skipping lapse BOW feature extraction...")
  85. else:
  86. # Step 3 - calculate training data (BOW features of Lapse images)
  87. print(f"Extracting BOW features from Lapse images...")
  88. features = [feat for _, feat in generate_bow_features(list(session.generate_lapse_images()), dictionary, kp_step=args.step_size, kp_size=args.keypoint_size)]
  89. np.save(train_feat_file, features)
  90. print("Complete!")
# Script entry point: only run the pipeline when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    main()