# generate_bindings.py
# This file is part of libigl, a simple c++ geometry processing library.
#
# Copyright (C) 2017 Sebastian Koch <s.koch@tu-berlin.de> and Daniele Panozzo <daniele.panozzo@gmail.com>
#
# This Source Code Form is subject to the terms of the Mozilla Public License
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at http://mozilla.org/MPL/2.0/.
#!/usr/bin/env python3
#
# Syntax: generate_bindings.py <path_to_c++_files>
#
# Extract documentation from C++ header files to use it in libigl Python bindings
#
  14. import os, sys, glob
  15. import pickle
  16. import shutil
  17. from joblib import Parallel, delayed
  18. from multiprocessing import cpu_count
  19. from mako.template import Template
  20. from parser import parse
  21. # http://stackoverflow.com/questions/3207219/how-to-list-all-files-of-a-directory-in-python
  22. def get_filepaths(directory):
  23. """
  24. This function will generate the file names in a directory
  25. tree by walking the tree either top-down or bottom-up. For each
  26. directory in the tree rooted at directory top (including top itself),
  27. it yields a 3-tuple (dirpath, dirnames, filenames).
  28. """
  29. file_paths = [] # List which will store all of the full filepaths.
  30. # Walk the tree.
  31. for root, directories, files in os.walk(directory):
  32. for filename in files:
  33. # Join the two strings in order to form the full filepath.
  34. filepath = os.path.join(root, filename)
  35. file_paths.append(filepath) # Add it to the list.
  36. return file_paths # Self-explanatory.
  37. def get_name_from_path(path, basepath, prefix, postfix):
  38. f_clean = path[len(basepath):]
  39. f_clean = f_clean.replace(basepath, "")
  40. f_clean = f_clean.replace(postfix, "")
  41. f_clean = f_clean.replace(prefix, "")
  42. f_clean = f_clean.replace("/", "_")
  43. f_clean = f_clean.replace("\\", "_")
  44. f_clean = f_clean.replace(" ", "_")
  45. f_clean = f_clean.replace(".", "_")
  46. return f_clean
  47. def map_parameter_types(name, cpp_type, parsed_types, errors, enum_types):
  48. # TODO Replace with proper regex matching and derive types from templates, comment parsing, names in cpp files
  49. # CAUTION: This is work in progress mapping code to get a grip of the problem
  50. # Types to map
  51. # const int dim -> const int& dim ?
  52. result = []
  53. if cpp_type.startswith("const"):
  54. result.append("const ")
  55. cpp_type = cpp_type[6:] # Strip const part
  56. # Handle special types
  57. skip_parsing = False
  58. if cpp_type.startswith("MatY"):
  59. result.append("Eigen::SparseMatrix<double>&")
  60. skip_parsing = True
  61. if cpp_type.startswith("Eigen::Matrix<unsigned char, Eigen::Dynamic, Eigen::Dynamic>"):
  62. result.append("Eigen::Matrix<unsigned char, Eigen::Dynamic, Eigen::Dynamic>")
  63. skip_parsing = True
  64. if cpp_type == "std::vector<std::vector<Scalar> > &":
  65. result.append("std::vector<std::vector<double> > &")
  66. skip_parsing = True
  67. if cpp_type == "std::vector<std::vector<Index> > &":
  68. result.append("std::vector<std::vector<int> > &")
  69. skip_parsing = True
  70. for constant in enum_types:
  71. if cpp_type.endswith(constant):
  72. result.append(cpp_type)
  73. skip_parsing = True
  74. if len(parsed_types) == 0:
  75. errors.append("Empty typechain: %s" % cpp_type)
  76. if cpp_type == "int" or cpp_type == "bool" or cpp_type == "unsigned int":
  77. return cpp_type, True
  78. else:
  79. return cpp_type, False
  80. # print(parsed_types, cpp_type)
  81. if not skip_parsing:
  82. for i, t in enumerate(parsed_types):
  83. if t == "Eigen":
  84. result.append("Eigen::")
  85. continue
  86. if t == "std":
  87. result.append("std::")
  88. continue
  89. if t == "PlainObjectBase" or t == "MatrixBase":
  90. if name == "F":
  91. result.append("MatrixXi&")
  92. elif name == "V":
  93. result.append("MatrixXd&")
  94. else:
  95. result.append("MatrixXd&")
  96. break
  97. if t == "MatrixXi" or t == "VectorXi":
  98. result.append("MatrixXi&")
  99. break
  100. if t == "MatrixXd" or t == "VectorXd":
  101. result.append("MatrixXd&")
  102. break
  103. if t == "SparseMatrix" and len(parsed_types) >= i + 2 and (
  104. parsed_types[i + 1] == "Scalar" or parsed_types[i + 1] == "T"):
  105. result.append("SparseMatrix<double>&")
  106. break
  107. if t == "SparseVector" and len(parsed_types) >= i + 2 and (parsed_types[i + 1] == "Scalar" or parsed_types[
  108. i + 1] == "T"):
  109. result.append("SparseMatrix<double>&")
  110. break
  111. if t == "bool" or t == "int" or t == "double" or t == "unsigned" or t == "string":
  112. if cpp_type.endswith("&"):
  113. result.append(t + " &")
  114. else:
  115. result.append(t)
  116. break
  117. else:
  118. errors.append("Unknown typechain: %s" % cpp_type)
  119. return cpp_type, False
  120. return "".join(result), True
  121. if __name__ == '__main__':
  122. if len(sys.argv) != 2:
  123. print('Syntax: %s <path_to_c++_files>' % sys.argv[0])
  124. exit(-1)
  125. errors = {"missing": [], "empty": [], "others": [], "incorrect": [], "render": [], "various": []}
  126. files = {"complete": [], "partial": [], "errors": [], "others": [], "empty": []}
  127. # List all files in the given folder and subfolders
  128. cpp_base_path = sys.argv[1]
  129. cpp_file_paths = get_filepaths(cpp_base_path)
  130. # Add all the .h filepaths to a dict
  131. print("Collecting cpp files for parsing...")
  132. mapping = {}
  133. cppmapping = {}
  134. for f in cpp_file_paths:
  135. if f.endswith(".h"):
  136. name = get_name_from_path(f, cpp_base_path, "", ".h")
  137. mapping[name] = f
  138. if f.endswith(".cpp"):
  139. name = get_name_from_path(f, cpp_base_path, "", ".cpp")
  140. cppmapping[name] = f
  141. # Add all python binding files to a list
  142. implemented_names = list(mapping.keys()) # ["point_mesh_squared_distance"]
  143. implemented_names.sort()
  144. single_postfix = ""
  145. single_prefix = ""
  146. # Create a list of all cpp header files
  147. files_to_parse = []
  148. cppfiles_to_parse = []
  149. for n in implemented_names:
  150. files_to_parse.append(mapping[n])
  151. if n not in cppmapping:
  152. errors["missing"].append("No cpp source file for function %s found." % n)
  153. else:
  154. cppfiles_to_parse.append(cppmapping[n])
  155. # Parse c++ header files
  156. print("Parsing header files...")
  157. load_headers = False
  158. if load_headers:
  159. with open("headers.dat", 'rb') as fs:
  160. dicts = pickle.load(fs)
  161. else:
  162. job_count = cpu_count()
  163. dicts = Parallel(n_jobs=job_count)(delayed(parse)(path) for path in files_to_parse)
  164. if not load_headers:
  165. print("Saving parsed header files...")
  166. with open("headers.dat", 'wb') as fs:
  167. pickle.dump(dicts, fs)
  168. # Not yet needed, as explicit template parsing does not seem to be supported in clang
  169. # Parse c++ source files
  170. # cppdicts = Parallel(n_jobs=job_count)(delayed(parse)(path) for path in cppfiles_to_parse)
  171. # Change directory to become independent of execution directory
  172. print("Generating directory tree for binding files...")
  173. path = os.path.dirname(__file__)
  174. if path != "":
  175. os.chdir(path)
  176. try:
  177. shutil.rmtree("generated")
  178. except:
  179. pass # Ignore missing generated directory
  180. os.makedirs("generated/complete")
  181. os.mkdir("generated/partial")
  182. print("Generating and writing binding files...")
  183. for idx, n in enumerate(implemented_names):
  184. d = dicts[idx]
  185. contained_elements = sum(map(lambda x: len(x), d.values()))
  186. # Skip files that don't contain functions/enums/classes
  187. if contained_elements == 0:
  188. errors["empty"].append("Function %s contains no parseable content in cpp header. Something might be wrong." % n)
  189. files["empty"].append(n)
  190. continue
  191. # Add functions with classes to others
  192. if len(d["classes"]) != 0 or len(d["structs"]) != 0:
  193. errors["others"].append("Function %s contains classes/structs in cpp header. Skipping" % n)
  194. files["others"].append(n)
  195. continue
  196. # Work on files that contain only functions/enums and namespaces
  197. if len(d["functions"]) + len(d["namespaces"]) + len(d["enums"]) == contained_elements:
  198. correct_functions = []
  199. incorrect_functions = []
  200. # Collect enums to generate binding files
  201. enums = []
  202. enum_types = []
  203. for e in d["enums"]:
  204. enums.append({"name": e.name, "namespaces": d["namespaces"], "constants": e.constants})
  205. enum_types.append(e.name)
  206. # Collect functions to generate binding files
  207. for f in d["functions"]:
  208. parameters = []
  209. correct_function = True
  210. f_errors = []
  211. for p in f.parameters:
  212. typ, correct = map_parameter_types(p[0], p[1], p[2], f_errors, enum_types)
  213. correct_function &= correct
  214. parameters.append({"name": p[0], "type": typ})
  215. if correct_function and len(parameters) > 0: #TODO add constants like EPS
  216. correct_functions.append({"parameters": parameters, "namespaces": d["namespaces"], "name": f.name})
  217. elif len(parameters) > 0:
  218. incorrect_functions.append({"parameters": parameters, "namespaces": d["namespaces"], "name": f.name})
  219. errors["incorrect"].append("Incorrect function in %s: %s, %s\n" % (n, f.name, ",".join(f_errors)))
  220. else:
  221. errors["various"].append("Function without pars in %s: %s, %s\n" % (n, f.name, ","
  222. "".join(f_errors)))
  223. # Write binding files
  224. try:
  225. tpl = Template(filename='basic_function.mako')
  226. rendered = tpl.render(functions=correct_functions, enums=enums)
  227. tpl1 = Template(filename='basic_function.mako')
  228. rendered1 = tpl.render(functions=incorrect_functions, enums=enums)
  229. path = "generated/"
  230. if len(incorrect_functions) == 0 and (len(correct_functions) != 0 or len(enums) != 0):
  231. path += "complete/"
  232. with open(path + single_prefix + "py_" + n + ".cpp", 'w') as fs:
  233. fs.write(rendered)
  234. files["complete"].append(n)
  235. else:
  236. path += "partial/"
  237. with open(path + single_prefix + "py_" + n + ".cpp", 'w') as fs:
  238. fs.write("// COMPLETE BINDINGS ========================\n")
  239. fs.write(rendered)
  240. fs.write("\n\n\n\n// INCOMPLETE BINDINGS ========================\n")
  241. fs.write(rendered1)
  242. if len(correct_functions) != 0:
  243. files["partial"].append(n)
  244. else:
  245. files["errors"].append(n)
  246. except Exception as e:
  247. files["errors"].append(n)
  248. errors["render"].append("Template rendering failed:" + n + " " + str(correct_functions) + ", incorrect "
  249. "functions are " + str(
  250. incorrect_functions) + str(e) + "\n")
  251. print("Writing error and overview files...")
  252. with open("errors.txt" + single_postfix, 'w') as fs:
  253. l = list(errors.keys())
  254. l.sort()
  255. for k in l:
  256. fs.write("%s: %i \n" %(k, len(errors[k])))
  257. fs.writelines("\n".join(errors[k]))
  258. fs.write("\n\n\n")
  259. with open("files.txt" + single_postfix, 'w') as fs:
  260. l = list(files.keys())
  261. l.sort()
  262. for k in l:
  263. fs.write("%s: %i \n" %(k, len(files[k])))
  264. fs.writelines("\n".join(files[k]))
  265. fs.write("\n\n\n")