# builder.py
  1. import ap_git
  2. from build_manager import (
  3. BuildManager as bm,
  4. )
  5. import subprocess
  6. import os
  7. import shutil
  8. import logging
  9. import tarfile
  10. from metadata_manager import (
  11. APSourceMetadataFetcher as apfetch,
  12. RemoteInfo,
  13. VehiclesManager as vehm
  14. )
  15. from pathlib import Path
  16. class Builder:
  17. """
  18. Processes build requests, perform builds and ship build artifacts
  19. to the destination directory shared by BuildManager.
  20. """
  21. def __init__(self, workdir: str, source_repo: ap_git.GitRepo) -> None:
  22. """
  23. Initialises the Builder class.
  24. Parameters:
  25. workdir (str): Workspace for the builder.
  26. source_repo (ap_git.GitRepo): Ardupilot repository to be used for
  27. retrieving source for doing builds.
  28. Raises:
  29. RuntimeError: If BuildManager or APSourceMetadataFetcher is not
  30. initialised.
  31. """
  32. if bm.get_singleton() is None:
  33. raise RuntimeError(
  34. "BuildManager should be initialized first."
  35. )
  36. if apfetch.get_singleton() is None:
  37. raise RuntimeError(
  38. "APSourceMetadataFetcher should be initialised first."
  39. )
  40. if vehm.get_singleton() is None:
  41. raise RuntimeError(
  42. "VehiclesManager should be initialised first."
  43. )
  44. self.__workdir_parent = workdir
  45. self.__master_repo = source_repo
  46. self.logger = logging.getLogger(__name__)
  47. def __log_build_info(self, build_id: str) -> None:
  48. """
  49. Logs the build information to the build log.
  50. Parameters:
  51. build_id (str): Unique identifier for the build.
  52. """
  53. build_info = bm.get_singleton().get_build_info(build_id)
  54. logpath = bm.get_singleton().get_build_log_path(build_id)
  55. with open(logpath, "a") as build_log:
  56. build_log.write(f"Vehicle: {build_info.vehicle}\n"
  57. f"Board: {build_info.board}\n"
  58. f"Remote URL: {build_info.remote_info.url}\n"
  59. f"git-sha: {build_info.git_hash}\n"
  60. "---\n"
  61. "Selected Features:\n")
  62. for d in build_info.selected_features:
  63. build_log.write(f"{d}\n")
  64. build_log.write("---\n")
  65. def __generate_extrahwdef(self, build_id: str) -> None:
  66. """
  67. Generates the extra hardware definition file (`extra_hwdef.dat`) for
  68. the build.
  69. Parameters:
  70. build_id (str): Unique identifier for the build.
  71. Raises:
  72. RuntimeError: If the parent directory for putting `extra_hwdef.dat`
  73. does not exist.
  74. """
  75. # Log to build log
  76. logpath = bm.get_singleton().get_build_log_path(build_id)
  77. with open(logpath, "a") as build_log:
  78. build_log.write("Generating extrahwdef file...\n")
  79. path = self.__get_path_to_extra_hwdef(build_id)
  80. self.logger.debug(
  81. f"Path to extra_hwdef for build id {build_id}: {path}"
  82. )
  83. if not os.path.exists(os.path.dirname(path)):
  84. raise RuntimeError(
  85. f"Create parent directory '{os.path.dirname(path)}' "
  86. "before writing extra_hwdef.dat"
  87. )
  88. build_info = bm.get_singleton().get_build_info(build_id)
  89. selected_features = build_info.selected_features
  90. self.logger.debug(
  91. f"Selected features for {build_id}: {selected_features}"
  92. )
  93. all_features = apfetch.get_singleton().get_build_options_at_commit(
  94. remote=build_info.remote_info.name,
  95. commit_ref=build_info.git_hash,
  96. )
  97. all_defines = {
  98. feature.define
  99. for feature in all_features
  100. }
  101. enabled_defines = selected_features.intersection(all_defines)
  102. disabled_defines = all_defines.difference(enabled_defines)
  103. self.logger.info(f"Enabled defines for {build_id}: {enabled_defines}")
  104. self.logger.info(f"Disabled defines for {build_id}: {enabled_defines}")
  105. with open(self.__get_path_to_extra_hwdef(build_id), "w") as f:
  106. # Undefine all defines at the beginning
  107. for define in all_defines:
  108. f.write(f"undef {define}\n")
  109. # Enable selected defines
  110. for define in enabled_defines:
  111. f.write(f"define {define} 1\n")
  112. # Disable the remaining defines
  113. for define in disabled_defines:
  114. f.write(f"define {define} 0\n")
  115. def __ensure_remote_added(self, remote_info: RemoteInfo) -> None:
  116. """
  117. Ensures that the remote repository is correctly added to the
  118. master repository.
  119. Parameters:
  120. remote_info (RemoteInfo): Information about the remote repository.
  121. """
  122. try:
  123. self.__master_repo.remote_add(
  124. remote=remote_info.name,
  125. url=remote_info.url,
  126. )
  127. self.logger.info(
  128. f"Added remote {remote_info.name} to master repo."
  129. )
  130. except ap_git.DuplicateRemoteError:
  131. self.logger.debug(
  132. f"Remote {remote_info.name} already exists."
  133. f"Setting URL to {remote_info.url}."
  134. )
  135. # Update the URL if the remote already exists
  136. self.__master_repo.remote_set_url(
  137. remote=remote_info.name,
  138. url=remote_info.url,
  139. )
  140. self.logger.info(
  141. f"Updated remote url to {remote_info.url}"
  142. f"for remote {remote_info.name}"
  143. )
  144. def __provision_build_source(self, build_id: str) -> None:
  145. """
  146. Provisions the source code for a specific build.
  147. Parameters:
  148. build_id (str): Unique identifier for the build.
  149. """
  150. # Log to build log
  151. logpath = bm.get_singleton().get_build_log_path(build_id)
  152. with open(logpath, "a") as build_log:
  153. build_log.write("Cloning build source...\n")
  154. build_info = bm.get_singleton().get_build_info(build_id)
  155. logging.info(
  156. f"Ensuring {build_info.remote_info.name} is added to master repo."
  157. )
  158. self.__ensure_remote_added(build_info.remote_info)
  159. logging.info(f"Cloning build source for {build_id} from master repo.")
  160. ap_git.GitRepo.shallow_clone_at_commit_from_local(
  161. source=self.__master_repo.get_local_path(),
  162. remote=build_info.remote_info.name,
  163. commit_ref=build_info.git_hash,
  164. dest=self.__get_path_to_build_src(build_id),
  165. )
  166. def __create_build_artifacts_dir(self, build_id: str) -> None:
  167. """
  168. Creates the output directory to store build artifacts.
  169. Parameters:
  170. build_id (str): Unique identifier for the build.
  171. """
  172. p = Path(bm.get_singleton().get_build_artifacts_dir_path(build_id))
  173. self.logger.info(f"Creating directory at {p}.")
  174. try:
  175. Path.mkdir(p, parents=True)
  176. except FileExistsError:
  177. shutil.rmtree(p)
  178. Path.mkdir(p)
  179. def __create_build_workdir(self, build_id: str) -> None:
  180. """
  181. Creates the working directory for the build.
  182. Parameters:
  183. build_id (str): Unique identifier for the build.
  184. """
  185. p = Path(self.__get_path_to_build_dir(build_id))
  186. self.logger.info(f"Creating directory at {p}.")
  187. try:
  188. Path.mkdir(p, parents=True)
  189. except FileExistsError:
  190. shutil.rmtree(p)
  191. Path.mkdir(p)
  192. def __generate_archive(self, build_id: str) -> None:
  193. """
  194. Placeholder for generating the zipped build artifact.
  195. Parameters:
  196. build_id (str): Unique identifier for the build.
  197. """
  198. build_info = bm.get_singleton().get_build_info(build_id)
  199. archive_path = bm.get_singleton().get_build_archive_path(build_id)
  200. files_to_include = []
  201. # include binaries
  202. bin_path = os.path.join(
  203. self.__get_path_to_build_dir(build_id),
  204. build_info.board,
  205. "bin"
  206. )
  207. # Ensure bin_path exists
  208. Path.mkdir(bin_path, exist_ok=True)
  209. bin_list = os.listdir(bin_path)
  210. self.logger.debug(f"bin_path: {bin_path}")
  211. self.logger.debug(f"bin_list: {bin_list}")
  212. for file in bin_list:
  213. file_path_abs = os.path.abspath(
  214. os.path.join(bin_path, file)
  215. )
  216. files_to_include.append(file_path_abs)
  217. # include log
  218. log_path_abs = os.path.abspath(
  219. bm.get_singleton().get_build_log_path(build_id)
  220. )
  221. files_to_include.append(log_path_abs)
  222. # include extra_hwdef.dat
  223. extra_hwdef_path_abs = os.path.abspath(
  224. self.__get_path_to_extra_hwdef(build_id)
  225. )
  226. files_to_include.append(extra_hwdef_path_abs)
  227. # create archive
  228. with tarfile.open(archive_path, "w:gz") as tar:
  229. for file in files_to_include:
  230. arcname = f"{build_id}/{os.path.basename(file)}"
  231. self.logger.debug(f"Added {file} as {arcname}")
  232. tar.add(file, arcname=arcname)
  233. self.logger.info(f"Generated {archive_path}.")
  234. def __clean_up_build_workdir(self, build_id: str) -> None:
  235. shutil.rmtree(self.__get_path_to_build_dir(build_id))
  236. def __process_build(self, build_id: str) -> None:
  237. """
  238. Processes a new build by preparing source code and extra_hwdef file
  239. and running the build finally.
  240. Parameters:
  241. build_id (str): Unique identifier for the build.
  242. """
  243. self.__create_build_workdir(build_id)
  244. self.__create_build_artifacts_dir(build_id)
  245. self.__log_build_info(build_id)
  246. self.__provision_build_source(build_id)
  247. self.__generate_extrahwdef(build_id)
  248. self.__build(build_id)
  249. self.__generate_archive(build_id)
  250. self.__clean_up_build_workdir(build_id)
  251. def __get_path_to_build_dir(self, build_id: str) -> str:
  252. """
  253. Returns the path to the temporary workspace for a build.
  254. This directory contains the source code and extra_hwdef.dat file.
  255. Parameters:
  256. build_id (str): Unique identifier for the build.
  257. Returns:
  258. str: Path to the build directory.
  259. """
  260. return os.path.join(self.__workdir_parent, build_id)
  261. def __get_path_to_extra_hwdef(self, build_id: str) -> str:
  262. """
  263. Returns the path to the extra_hwdef definition file for a build.
  264. Parameters:
  265. build_id (str): Unique identifier for the build.
  266. Returns:
  267. str: Path to the extra hardware definition file.
  268. """
  269. return os.path.join(
  270. self.__get_path_to_build_dir(build_id),
  271. "extra_hwdef.dat",
  272. )
  273. def __get_path_to_build_src(self, build_id: str) -> str:
  274. """
  275. Returns the path to the source code for a build.
  276. Parameters:
  277. build_id (str): Unique identifier for the build.
  278. Returns:
  279. str: Path to the build source directory.
  280. """
  281. return os.path.join(
  282. self.__get_path_to_build_dir(build_id),
  283. "build_src"
  284. )
    def __build(self, build_id: str) -> None:
        """
        Executes the actual build process for a build.

        This should be called after preparing build source code and
        extra_hwdef file.

        Parameters:
            build_id (str): Unique identifier for the build.

        Raises:
            RuntimeError: If source directory or extra hardware definition
                file does not exist.
        """
        # Validate that the earlier pipeline stages produced everything
        # the build needs before doing any work.
        if not os.path.exists(self.__get_path_to_build_dir(build_id)):
            raise RuntimeError("Creating build before building.")
        if not os.path.exists(self.__get_path_to_build_src(build_id)):
            raise RuntimeError("Cannot build without source code.")
        if not os.path.exists(self.__get_path_to_extra_hwdef(build_id)):
            raise RuntimeError("Cannot build without extra_hwdef.dat file.")
        build_info = bm.get_singleton().get_build_info(build_id)
        source_repo = ap_git.GitRepo(self.__get_path_to_build_src(build_id))
        # Checkout the specific commit and ensure submodules are updated
        source_repo.checkout_remote_commit_ref(
            remote=build_info.remote_info.name,
            commit_ref=build_info.git_hash,
            force=True,
            hard_reset=True,
            clean_working_tree=True,
        )
        source_repo.submodule_update(init=True, recursive=True, force=True)
        logpath = bm.get_singleton().get_build_log_path(build_id)
        # All waf output (stdout and stderr) is redirected into the build
        # log file opened here.
        with open(logpath, "a") as build_log:
            # Log initial configuration
            build_log.write(
                "Setting vehicle to: "
                f"{build_info.vehicle.capitalize()}\n"
            )
            # Flush before handing the file object to subprocess so our
            # log lines appear in order with the child-process output.
            build_log.flush()
            # Run the build steps
            self.logger.info("Running waf configure")
            build_log.write("Running waf configure\n")
            build_log.flush()
            # NOTE(review): the return codes of these subprocess.run calls
            # are never checked, so a failed waf step still proceeds to
            # the next step and to archiving — confirm this is intended.
            subprocess.run(
                [
                    "python3",
                    "./waf",
                    "configure",
                    "--board",
                    build_info.board,
                    "--out",
                    self.__get_path_to_build_dir(build_id),
                    "--extra-hwdef",
                    self.__get_path_to_extra_hwdef(build_id),
                ],
                cwd=self.__get_path_to_build_src(build_id),
                stdout=build_log,
                stderr=build_log,
                shell=False,
            )
            self.logger.info("Running clean")
            build_log.write("Running clean\n")
            build_log.flush()
            subprocess.run(
                ["python3", "./waf", "clean"],
                cwd=self.__get_path_to_build_src(build_id),
                stdout=build_log,
                stderr=build_log,
                shell=False,
            )
            self.logger.info("Running build")
            build_log.write("Running build\n")
            build_log.flush()
            # Look up the vehicle-specific waf build command from the
            # vehicles registry.
            vehicle = vehm.get_singleton().get_vehicle_from_name(
                vehicle_name=build_info.vehicle
            )
            build_command = vehicle.waf_build_command
            subprocess.run(
                ["python3", "./waf", build_command],
                cwd=self.__get_path_to_build_src(build_id),
                stdout=build_log,
                stderr=build_log,
                shell=False,
            )
            build_log.write("done build\n")
            build_log.flush()
  368. def run(self) -> None:
  369. """
  370. Continuously processes builds in the queue until termination.
  371. """
  372. while True:
  373. build_to_process = bm.get_singleton().get_next_build_id()
  374. self.__process_build(build_id=build_to_process)