# builder.py
  1. import ap_git
  2. from build_manager import (
  3. BuildManager as bm,
  4. )
  5. import subprocess
  6. import os
  7. import shutil
  8. import logging
  9. import tarfile
  10. from metadata_manager import (
  11. APSourceMetadataFetcher as apfetch,
  12. RemoteInfo,
  13. VehiclesManager as vehm
  14. )
  15. from pathlib import Path
  16. class Builder:
  17. """
  18. Processes build requests, perform builds and ship build artifacts
  19. to the destination directory shared by BuildManager.
  20. """
  21. def __init__(self, workdir: str, source_repo: ap_git.GitRepo) -> None:
  22. """
  23. Initialises the Builder class.
  24. Parameters:
  25. workdir (str): Workspace for the builder.
  26. source_repo (ap_git.GitRepo): Ardupilot repository to be used for
  27. retrieving source for doing builds.
  28. Raises:
  29. RuntimeError: If BuildManager or APSourceMetadataFetcher is not
  30. initialised.
  31. """
  32. if bm.get_singleton() is None:
  33. raise RuntimeError(
  34. "BuildManager should be initialized first."
  35. )
  36. if apfetch.get_singleton() is None:
  37. raise RuntimeError(
  38. "APSourceMetadataFetcher should be initialised first."
  39. )
  40. if vehm.get_singleton() is None:
  41. raise RuntimeError(
  42. "VehiclesManager should be initialised first."
  43. )
  44. self.__workdir_parent = workdir
  45. self.__master_repo = source_repo
  46. self.logger = logging.getLogger(__name__)
  47. self.__shutdown_requested = False
  48. def __log_build_info(self, build_id: str) -> None:
  49. """
  50. Logs the build information to the build log.
  51. Parameters:
  52. build_id (str): Unique identifier for the build.
  53. """
  54. build_info = bm.get_singleton().get_build_info(build_id)
  55. logpath = bm.get_singleton().get_build_log_path(build_id)
  56. with open(logpath, "a") as build_log:
  57. build_log.write(f"Vehicle ID: {build_info.vehicle_id}\n"
  58. f"Board: {build_info.board}\n"
  59. f"Remote URL: {build_info.remote_info.url}\n"
  60. f"git-sha: {build_info.git_hash}\n"
  61. "---\n"
  62. "Selected Features:\n")
  63. for d in build_info.selected_features:
  64. build_log.write(f"{d}\n")
  65. build_log.write("---\n")
  66. def __generate_extrahwdef(self, build_id: str) -> None:
  67. """
  68. Generates the extra hardware definition file (`extra_hwdef.dat`) for
  69. the build.
  70. Parameters:
  71. build_id (str): Unique identifier for the build.
  72. Raises:
  73. RuntimeError: If the parent directory for putting `extra_hwdef.dat`
  74. does not exist.
  75. """
  76. # Log to build log
  77. logpath = bm.get_singleton().get_build_log_path(build_id)
  78. with open(logpath, "a") as build_log:
  79. build_log.write("Generating extrahwdef file...\n")
  80. path = self.__get_path_to_extra_hwdef(build_id)
  81. self.logger.debug(
  82. f"Path to extra_hwdef for build id {build_id}: {path}"
  83. )
  84. if not os.path.exists(os.path.dirname(path)):
  85. raise RuntimeError(
  86. f"Create parent directory '{os.path.dirname(path)}' "
  87. "before writing extra_hwdef.dat"
  88. )
  89. build_info = bm.get_singleton().get_build_info(build_id)
  90. selected_features = build_info.selected_features
  91. self.logger.debug(
  92. f"Selected features for {build_id}: {selected_features}"
  93. )
  94. all_features = apfetch.get_singleton().get_build_options_at_commit(
  95. remote=build_info.remote_info.name,
  96. commit_ref=build_info.git_hash,
  97. )
  98. all_defines = {
  99. feature.define
  100. for feature in all_features
  101. }
  102. enabled_defines = selected_features.intersection(all_defines)
  103. disabled_defines = all_defines.difference(enabled_defines)
  104. self.logger.info(f"Enabled defines for {build_id}: {enabled_defines}")
  105. self.logger.info(f"Disabled defines for {build_id}: {enabled_defines}")
  106. with open(self.__get_path_to_extra_hwdef(build_id), "w") as f:
  107. # Undefine all defines at the beginning
  108. for define in all_defines:
  109. f.write(f"undef {define}\n")
  110. # Enable selected defines
  111. for define in enabled_defines:
  112. f.write(f"define {define} 1\n")
  113. # Disable the remaining defines
  114. for define in disabled_defines:
  115. f.write(f"define {define} 0\n")
  116. def __ensure_remote_added(self, remote_info: RemoteInfo) -> None:
  117. """
  118. Ensures that the remote repository is correctly added to the
  119. master repository.
  120. Parameters:
  121. remote_info (RemoteInfo): Information about the remote repository.
  122. """
  123. try:
  124. self.__master_repo.remote_add(
  125. remote=remote_info.name,
  126. url=remote_info.url,
  127. )
  128. self.logger.info(
  129. f"Added remote {remote_info.name} to master repo."
  130. )
  131. except ap_git.DuplicateRemoteError:
  132. self.logger.debug(
  133. f"Remote {remote_info.name} already exists."
  134. f"Setting URL to {remote_info.url}."
  135. )
  136. # Update the URL if the remote already exists
  137. self.__master_repo.remote_set_url(
  138. remote=remote_info.name,
  139. url=remote_info.url,
  140. )
  141. self.logger.info(
  142. f"Updated remote url to {remote_info.url}"
  143. f"for remote {remote_info.name}"
  144. )
  145. def __provision_build_source(self, build_id: str) -> None:
  146. """
  147. Provisions the source code for a specific build.
  148. Parameters:
  149. build_id (str): Unique identifier for the build.
  150. """
  151. # Log to build log
  152. logpath = bm.get_singleton().get_build_log_path(build_id)
  153. with open(logpath, "a") as build_log:
  154. build_log.write("Cloning build source...\n")
  155. build_info = bm.get_singleton().get_build_info(build_id)
  156. logging.info(
  157. f"Ensuring {build_info.remote_info.name} is added to master repo."
  158. )
  159. self.__ensure_remote_added(build_info.remote_info)
  160. logging.info(f"Cloning build source for {build_id} from master repo.")
  161. ap_git.GitRepo.shallow_clone_at_commit_from_local(
  162. source=self.__master_repo.get_local_path(),
  163. remote=build_info.remote_info.name,
  164. commit_ref=build_info.git_hash,
  165. dest=self.__get_path_to_build_src(build_id),
  166. )
  167. def __create_build_artifacts_dir(self, build_id: str) -> None:
  168. """
  169. Creates the output directory to store build artifacts.
  170. Parameters:
  171. build_id (str): Unique identifier for the build.
  172. """
  173. p = Path(bm.get_singleton().get_build_artifacts_dir_path(build_id))
  174. self.logger.info(f"Creating directory at {p}.")
  175. try:
  176. Path.mkdir(p, parents=True)
  177. except FileExistsError:
  178. shutil.rmtree(p)
  179. Path.mkdir(p)
  180. def __create_build_workdir(self, build_id: str) -> None:
  181. """
  182. Creates the working directory for the build.
  183. Parameters:
  184. build_id (str): Unique identifier for the build.
  185. """
  186. p = Path(self.__get_path_to_build_dir(build_id))
  187. self.logger.info(f"Creating directory at {p}.")
  188. try:
  189. Path.mkdir(p, parents=True)
  190. except FileExistsError:
  191. shutil.rmtree(p)
  192. Path.mkdir(p)
  193. def __generate_archive(self, build_id: str) -> None:
  194. """
  195. Placeholder for generating the zipped build artifact.
  196. Parameters:
  197. build_id (str): Unique identifier for the build.
  198. """
  199. build_info = bm.get_singleton().get_build_info(build_id)
  200. archive_path = bm.get_singleton().get_build_archive_path(build_id)
  201. files_to_include = []
  202. # include binaries
  203. bin_path = os.path.join(
  204. self.__get_path_to_build_dir(build_id),
  205. build_info.board,
  206. "bin"
  207. )
  208. # Ensure bin_path exists
  209. Path.mkdir(Path(bin_path), exist_ok=True)
  210. bin_list = os.listdir(bin_path)
  211. self.logger.debug(f"bin_path: {bin_path}")
  212. self.logger.debug(f"bin_list: {bin_list}")
  213. for file in bin_list:
  214. file_path_abs = os.path.abspath(
  215. os.path.join(bin_path, file)
  216. )
  217. files_to_include.append(file_path_abs)
  218. # include log
  219. log_path_abs = os.path.abspath(
  220. bm.get_singleton().get_build_log_path(build_id)
  221. )
  222. files_to_include.append(log_path_abs)
  223. # include extra_hwdef.dat
  224. extra_hwdef_path_abs = os.path.abspath(
  225. self.__get_path_to_extra_hwdef(build_id)
  226. )
  227. files_to_include.append(extra_hwdef_path_abs)
  228. # create archive
  229. with tarfile.open(archive_path, "w:gz") as tar:
  230. for file in files_to_include:
  231. arcname = f"{build_id}/{os.path.basename(file)}"
  232. self.logger.debug(f"Added {file} as {arcname}")
  233. tar.add(file, arcname=arcname)
  234. self.logger.info(f"Generated {archive_path}.")
  235. def __clean_up_build_workdir(self, build_id: str) -> None:
  236. shutil.rmtree(self.__get_path_to_build_dir(build_id))
  237. def __process_build(self, build_id: str) -> None:
  238. """
  239. Processes a new build by preparing source code and extra_hwdef file
  240. and running the build finally.
  241. Parameters:
  242. build_id (str): Unique identifier for the build.
  243. """
  244. self.__create_build_workdir(build_id)
  245. self.__create_build_artifacts_dir(build_id)
  246. self.__log_build_info(build_id)
  247. self.__provision_build_source(build_id)
  248. self.__generate_extrahwdef(build_id)
  249. self.__build(build_id)
  250. self.__generate_archive(build_id)
  251. self.__clean_up_build_workdir(build_id)
  252. def __get_path_to_build_dir(self, build_id: str) -> str:
  253. """
  254. Returns the path to the temporary workspace for a build.
  255. This directory contains the source code and extra_hwdef.dat file.
  256. Parameters:
  257. build_id (str): Unique identifier for the build.
  258. Returns:
  259. str: Path to the build directory.
  260. """
  261. return os.path.join(self.__workdir_parent, build_id)
  262. def __get_path_to_extra_hwdef(self, build_id: str) -> str:
  263. """
  264. Returns the path to the extra_hwdef definition file for a build.
  265. Parameters:
  266. build_id (str): Unique identifier for the build.
  267. Returns:
  268. str: Path to the extra hardware definition file.
  269. """
  270. return os.path.join(
  271. self.__get_path_to_build_dir(build_id),
  272. "extra_hwdef.dat",
  273. )
  274. def __get_path_to_build_src(self, build_id: str) -> str:
  275. """
  276. Returns the path to the source code for a build.
  277. Parameters:
  278. build_id (str): Unique identifier for the build.
  279. Returns:
  280. str: Path to the build source directory.
  281. """
  282. return os.path.join(
  283. self.__get_path_to_build_dir(build_id),
  284. "build_src"
  285. )
  286. def __build(self, build_id: str) -> None:
  287. """
  288. Executes the actual build process for a build.
  289. This should be called after preparing build source code and
  290. extra_hwdef file.
  291. Parameters:
  292. build_id (str): Unique identifier for the build.
  293. Raises:
  294. RuntimeError: If source directory or extra hardware definition
  295. file does not exist.
  296. """
  297. if not os.path.exists(self.__get_path_to_build_dir(build_id)):
  298. raise RuntimeError("Creating build before building.")
  299. if not os.path.exists(self.__get_path_to_build_src(build_id)):
  300. raise RuntimeError("Cannot build without source code.")
  301. if not os.path.exists(self.__get_path_to_extra_hwdef(build_id)):
  302. raise RuntimeError("Cannot build without extra_hwdef.dat file.")
  303. build_info = bm.get_singleton().get_build_info(build_id)
  304. source_repo = ap_git.GitRepo(self.__get_path_to_build_src(build_id))
  305. # Checkout the specific commit and ensure submodules are updated
  306. source_repo.checkout_remote_commit_ref(
  307. remote=build_info.remote_info.name,
  308. commit_ref=build_info.git_hash,
  309. force=True,
  310. hard_reset=True,
  311. clean_working_tree=True,
  312. )
  313. source_repo.submodule_update(init=True, recursive=True, force=True)
  314. logpath = bm.get_singleton().get_build_log_path(build_id)
  315. with open(logpath, "a") as build_log:
  316. # Get vehicle object
  317. vehicle = vehm.get_singleton().get_vehicle_by_id(build_info.vehicle_id)
  318. # Log initial configuration
  319. build_log.write(
  320. "Setting vehicle to: "
  321. f"{vehicle.name.capitalize()}\n"
  322. )
  323. build_log.flush()
  324. # Run the build steps
  325. self.logger.info("Running waf configure")
  326. build_log.write("Running waf configure\n")
  327. build_log.flush()
  328. subprocess.run(
  329. [
  330. "python3",
  331. "./waf",
  332. "configure",
  333. "--board",
  334. build_info.board,
  335. "--out",
  336. self.__get_path_to_build_dir(build_id),
  337. "--extra-hwdef",
  338. self.__get_path_to_extra_hwdef(build_id),
  339. ],
  340. cwd=self.__get_path_to_build_src(build_id),
  341. stdout=build_log,
  342. stderr=build_log,
  343. shell=False,
  344. )
  345. self.logger.info("Running clean")
  346. build_log.write("Running clean\n")
  347. build_log.flush()
  348. subprocess.run(
  349. ["python3", "./waf", "clean"],
  350. cwd=self.__get_path_to_build_src(build_id),
  351. stdout=build_log,
  352. stderr=build_log,
  353. shell=False,
  354. )
  355. self.logger.info("Running build")
  356. build_log.write("Running build\n")
  357. build_log.flush()
  358. build_command = vehicle.waf_build_command
  359. subprocess.run(
  360. ["python3", "./waf", build_command],
  361. cwd=self.__get_path_to_build_src(build_id),
  362. stdout=build_log,
  363. stderr=build_log,
  364. shell=False,
  365. )
  366. build_log.write("done build\n")
  367. build_log.flush()
  368. def shutdown(self) -> None:
  369. """
  370. Request graceful shutdown of the builder.
  371. """
  372. self.logger.info("Shutdown requested")
  373. self.__shutdown_requested = True
  374. def run(self) -> None:
  375. """
  376. Continuously processes builds in the queue until shutdown is requested.
  377. Completes any build that has been popped from the queue before
  378. checking shutdown status.
  379. """
  380. self.logger.info("Builder started and waiting for builds...")
  381. while not self.__shutdown_requested:
  382. build_to_process = bm.get_singleton().get_next_build_id(
  383. timeout=5
  384. )
  385. if build_to_process is None:
  386. # Timeout occurred, no build available
  387. # Loop will check shutdown flag and continue or exit
  388. continue
  389. # We got a build from queue, process it regardless of shutdown
  390. # This ensures we complete any work we've taken responsibility for
  391. self.logger.info(f"Processing build {build_to_process}")
  392. self.__process_build(build_id=build_to_process)
  393. self.logger.info("Builder shutting down gracefully")