# builder.py
  1. import ap_git
  2. from build_manager import (
  3. BuildManager as bm,
  4. )
  5. import subprocess
  6. import os
  7. import shutil
  8. import logging
  9. import tarfile
  10. from metadata_manager import (
  11. APSourceMetadataFetcher as apfetch,
  12. RemoteInfo,
  13. VehiclesManager as vehm
  14. )
  15. from pathlib import Path
  16. CBS_BUILD_TIMEOUT_SEC = int(os.getenv('CBS_BUILD_TIMEOUT_SEC', 900))
  17. class Builder:
  18. """
  19. Processes build requests, perform builds and ship build artifacts
  20. to the destination directory shared by BuildManager.
  21. """
  22. def __init__(self, workdir: str, source_repo: ap_git.GitRepo) -> None:
  23. """
  24. Initialises the Builder class.
  25. Parameters:
  26. workdir (str): Workspace for the builder.
  27. source_repo (ap_git.GitRepo): Ardupilot repository to be used for
  28. retrieving source for doing builds.
  29. Raises:
  30. RuntimeError: If BuildManager or APSourceMetadataFetcher is not
  31. initialised.
  32. """
  33. if bm.get_singleton() is None:
  34. raise RuntimeError(
  35. "BuildManager should be initialized first."
  36. )
  37. if apfetch.get_singleton() is None:
  38. raise RuntimeError(
  39. "APSourceMetadataFetcher should be initialised first."
  40. )
  41. if vehm.get_singleton() is None:
  42. raise RuntimeError(
  43. "VehiclesManager should be initialised first."
  44. )
  45. self.__workdir_parent = workdir
  46. self.__master_repo = source_repo
  47. self.logger = logging.getLogger(__name__)
  48. self.__shutdown_requested = False
  49. def __log_build_info(self, build_id: str) -> None:
  50. """
  51. Logs the build information to the build log.
  52. Parameters:
  53. build_id (str): Unique identifier for the build.
  54. """
  55. build_info = bm.get_singleton().get_build_info(build_id)
  56. logpath = bm.get_singleton().get_build_log_path(build_id)
  57. with open(logpath, "a") as build_log:
  58. build_log.write(f"Vehicle ID: {build_info.vehicle_id}\n"
  59. f"Board: {build_info.board}\n"
  60. f"Remote URL: {build_info.remote_info.url}\n"
  61. f"git-sha: {build_info.git_hash}\n"
  62. "---\n"
  63. "Selected Features:\n")
  64. for d in build_info.selected_features:
  65. build_log.write(f"{d}\n")
  66. build_log.write("---\n")
  67. def __generate_extrahwdef(self, build_id: str) -> None:
  68. """
  69. Generates the extra hardware definition file (`extra_hwdef.dat`) for
  70. the build.
  71. Parameters:
  72. build_id (str): Unique identifier for the build.
  73. Raises:
  74. RuntimeError: If the parent directory for putting `extra_hwdef.dat`
  75. does not exist.
  76. """
  77. # Log to build log
  78. logpath = bm.get_singleton().get_build_log_path(build_id)
  79. with open(logpath, "a") as build_log:
  80. build_log.write("Generating extrahwdef file...\n")
  81. path = self.__get_path_to_extra_hwdef(build_id)
  82. self.logger.debug(
  83. f"Path to extra_hwdef for build id {build_id}: {path}"
  84. )
  85. if not os.path.exists(os.path.dirname(path)):
  86. raise RuntimeError(
  87. f"Create parent directory '{os.path.dirname(path)}' "
  88. "before writing extra_hwdef.dat"
  89. )
  90. build_info = bm.get_singleton().get_build_info(build_id)
  91. selected_features = build_info.selected_features
  92. self.logger.debug(
  93. f"Selected features for {build_id}: {selected_features}"
  94. )
  95. all_features = apfetch.get_singleton().get_build_options_at_commit(
  96. remote=build_info.remote_info.name,
  97. commit_ref=build_info.git_hash,
  98. )
  99. all_defines = {
  100. feature.define
  101. for feature in all_features
  102. }
  103. enabled_defines = selected_features.intersection(all_defines)
  104. disabled_defines = all_defines.difference(enabled_defines)
  105. self.logger.info(f"Enabled defines for {build_id}: {enabled_defines}")
  106. self.logger.info(f"Disabled defines for {build_id}: {enabled_defines}")
  107. with open(self.__get_path_to_extra_hwdef(build_id), "w") as f:
  108. # Undefine all defines at the beginning
  109. for define in all_defines:
  110. f.write(f"undef {define}\n")
  111. # Enable selected defines
  112. for define in enabled_defines:
  113. f.write(f"define {define} 1\n")
  114. # Disable the remaining defines
  115. for define in disabled_defines:
  116. f.write(f"define {define} 0\n")
  117. def __ensure_remote_added(self, remote_info: RemoteInfo) -> None:
  118. """
  119. Ensures that the remote repository is correctly added to the
  120. master repository.
  121. Parameters:
  122. remote_info (RemoteInfo): Information about the remote repository.
  123. """
  124. try:
  125. self.__master_repo.remote_add(
  126. remote=remote_info.name,
  127. url=remote_info.url,
  128. )
  129. self.logger.info(
  130. f"Added remote {remote_info.name} to master repo."
  131. )
  132. except ap_git.DuplicateRemoteError:
  133. self.logger.debug(
  134. f"Remote {remote_info.name} already exists."
  135. f"Setting URL to {remote_info.url}."
  136. )
  137. # Update the URL if the remote already exists
  138. self.__master_repo.remote_set_url(
  139. remote=remote_info.name,
  140. url=remote_info.url,
  141. )
  142. self.logger.info(
  143. f"Updated remote url to {remote_info.url}"
  144. f"for remote {remote_info.name}"
  145. )
  146. def __provision_build_source(self, build_id: str) -> None:
  147. """
  148. Provisions the source code for a specific build.
  149. Parameters:
  150. build_id (str): Unique identifier for the build.
  151. """
  152. # Log to build log
  153. logpath = bm.get_singleton().get_build_log_path(build_id)
  154. with open(logpath, "a") as build_log:
  155. build_log.write("Cloning build source...\n")
  156. build_info = bm.get_singleton().get_build_info(build_id)
  157. logging.info(
  158. f"Ensuring {build_info.remote_info.name} is added to master repo."
  159. )
  160. self.__ensure_remote_added(build_info.remote_info)
  161. logging.info(f"Cloning build source for {build_id} from master repo.")
  162. ap_git.GitRepo.shallow_clone_at_commit_from_local(
  163. source=self.__master_repo.get_local_path(),
  164. remote=build_info.remote_info.name,
  165. commit_ref=build_info.git_hash,
  166. dest=self.__get_path_to_build_src(build_id),
  167. )
  168. def __create_build_artifacts_dir(self, build_id: str) -> None:
  169. """
  170. Creates the output directory to store build artifacts.
  171. Parameters:
  172. build_id (str): Unique identifier for the build.
  173. """
  174. p = Path(bm.get_singleton().get_build_artifacts_dir_path(build_id))
  175. self.logger.info(f"Creating directory at {p}.")
  176. try:
  177. Path.mkdir(p, parents=True)
  178. except FileExistsError:
  179. shutil.rmtree(p)
  180. Path.mkdir(p)
  181. def __create_build_workdir(self, build_id: str) -> None:
  182. """
  183. Creates the working directory for the build.
  184. Parameters:
  185. build_id (str): Unique identifier for the build.
  186. """
  187. p = Path(self.__get_path_to_build_dir(build_id))
  188. self.logger.info(f"Creating directory at {p}.")
  189. try:
  190. Path.mkdir(p, parents=True)
  191. except FileExistsError:
  192. shutil.rmtree(p)
  193. Path.mkdir(p)
  194. def __generate_archive(self, build_id: str) -> None:
  195. """
  196. Placeholder for generating the zipped build artifact.
  197. Parameters:
  198. build_id (str): Unique identifier for the build.
  199. """
  200. build_info = bm.get_singleton().get_build_info(build_id)
  201. archive_path = bm.get_singleton().get_build_archive_path(build_id)
  202. files_to_include = []
  203. # include binaries
  204. bin_path = os.path.join(
  205. self.__get_path_to_build_dir(build_id),
  206. build_info.board,
  207. "bin"
  208. )
  209. # Ensure bin_path exists
  210. Path.mkdir(Path(bin_path), exist_ok=True)
  211. bin_list = os.listdir(bin_path)
  212. self.logger.debug(f"bin_path: {bin_path}")
  213. self.logger.debug(f"bin_list: {bin_list}")
  214. for file in bin_list:
  215. file_path_abs = os.path.abspath(
  216. os.path.join(bin_path, file)
  217. )
  218. files_to_include.append(file_path_abs)
  219. # include log
  220. log_path_abs = os.path.abspath(
  221. bm.get_singleton().get_build_log_path(build_id)
  222. )
  223. files_to_include.append(log_path_abs)
  224. # include extra_hwdef.dat
  225. extra_hwdef_path_abs = os.path.abspath(
  226. self.__get_path_to_extra_hwdef(build_id)
  227. )
  228. files_to_include.append(extra_hwdef_path_abs)
  229. # create archive
  230. with tarfile.open(archive_path, "w:gz") as tar:
  231. for file in files_to_include:
  232. arcname = f"{build_id}/{os.path.basename(file)}"
  233. self.logger.debug(f"Added {file} as {arcname}")
  234. tar.add(file, arcname=arcname)
  235. self.logger.info(f"Generated {archive_path}.")
  236. def __clean_up_build_workdir(self, build_id: str) -> None:
  237. shutil.rmtree(self.__get_path_to_build_dir(build_id))
  238. def __process_build(self, build_id: str) -> None:
  239. """
  240. Processes a new build by preparing source code and extra_hwdef file
  241. and running the build finally.
  242. Parameters:
  243. build_id (str): Unique identifier for the build.
  244. """
  245. self.__create_build_workdir(build_id)
  246. self.__create_build_artifacts_dir(build_id)
  247. self.__log_build_info(build_id)
  248. self.__provision_build_source(build_id)
  249. self.__generate_extrahwdef(build_id)
  250. self.__build(build_id)
  251. self.__generate_archive(build_id)
  252. self.__clean_up_build_workdir(build_id)
  253. def __get_path_to_build_dir(self, build_id: str) -> str:
  254. """
  255. Returns the path to the temporary workspace for a build.
  256. This directory contains the source code and extra_hwdef.dat file.
  257. Parameters:
  258. build_id (str): Unique identifier for the build.
  259. Returns:
  260. str: Path to the build directory.
  261. """
  262. return os.path.join(self.__workdir_parent, build_id)
  263. def __get_path_to_extra_hwdef(self, build_id: str) -> str:
  264. """
  265. Returns the path to the extra_hwdef definition file for a build.
  266. Parameters:
  267. build_id (str): Unique identifier for the build.
  268. Returns:
  269. str: Path to the extra hardware definition file.
  270. """
  271. return os.path.join(
  272. self.__get_path_to_build_dir(build_id),
  273. "extra_hwdef.dat",
  274. )
  275. def __get_path_to_build_src(self, build_id: str) -> str:
  276. """
  277. Returns the path to the source code for a build.
  278. Parameters:
  279. build_id (str): Unique identifier for the build.
  280. Returns:
  281. str: Path to the build source directory.
  282. """
  283. return os.path.join(
  284. self.__get_path_to_build_dir(build_id),
  285. "build_src"
  286. )
  287. def __build(self, build_id: str) -> None:
  288. """
  289. Executes the actual build process for a build.
  290. This should be called after preparing build source code and
  291. extra_hwdef file.
  292. Parameters:
  293. build_id (str): Unique identifier for the build.
  294. Raises:
  295. RuntimeError: If source directory or extra hardware definition
  296. file does not exist.
  297. """
  298. if not os.path.exists(self.__get_path_to_build_dir(build_id)):
  299. raise RuntimeError("Creating build before building.")
  300. if not os.path.exists(self.__get_path_to_build_src(build_id)):
  301. raise RuntimeError("Cannot build without source code.")
  302. if not os.path.exists(self.__get_path_to_extra_hwdef(build_id)):
  303. raise RuntimeError("Cannot build without extra_hwdef.dat file.")
  304. build_info = bm.get_singleton().get_build_info(build_id)
  305. source_repo = ap_git.GitRepo(self.__get_path_to_build_src(build_id))
  306. # Checkout the specific commit and ensure submodules are updated
  307. source_repo.checkout_remote_commit_ref(
  308. remote=build_info.remote_info.name,
  309. commit_ref=build_info.git_hash,
  310. force=True,
  311. hard_reset=True,
  312. clean_working_tree=True,
  313. )
  314. source_repo.submodule_update(init=True, recursive=True, force=True)
  315. logpath = bm.get_singleton().get_build_log_path(build_id)
  316. with open(logpath, "a") as build_log:
  317. # Get vehicle object
  318. vehicle = vehm.get_singleton().get_vehicle_by_id(
  319. build_info.vehicle_id
  320. )
  321. # Log initial configuration
  322. build_log.write(
  323. "Setting vehicle to: "
  324. f"{vehicle.name.capitalize()}\n"
  325. )
  326. build_log.flush()
  327. try:
  328. # Run the build steps
  329. self.logger.info("Running waf configure")
  330. build_log.write("Running waf configure\n")
  331. build_log.flush()
  332. subprocess.run(
  333. [
  334. "python3",
  335. "./waf",
  336. "configure",
  337. "--board",
  338. build_info.board,
  339. "--out",
  340. self.__get_path_to_build_dir(build_id),
  341. "--extra-hwdef",
  342. self.__get_path_to_extra_hwdef(build_id),
  343. ],
  344. cwd=self.__get_path_to_build_src(build_id),
  345. stdout=build_log,
  346. stderr=build_log,
  347. shell=False,
  348. timeout=CBS_BUILD_TIMEOUT_SEC,
  349. )
  350. self.logger.info("Running clean")
  351. build_log.write("Running clean\n")
  352. build_log.flush()
  353. subprocess.run(
  354. ["python3", "./waf", "clean"],
  355. cwd=self.__get_path_to_build_src(build_id),
  356. stdout=build_log,
  357. stderr=build_log,
  358. shell=False,
  359. timeout=CBS_BUILD_TIMEOUT_SEC,
  360. )
  361. self.logger.info("Running build")
  362. build_log.write("Running build\n")
  363. build_log.flush()
  364. build_command = vehicle.waf_build_command
  365. subprocess.run(
  366. ["python3", "./waf", build_command],
  367. cwd=self.__get_path_to_build_src(build_id),
  368. stdout=build_log,
  369. stderr=build_log,
  370. shell=False,
  371. timeout=CBS_BUILD_TIMEOUT_SEC,
  372. )
  373. build_log.write("done build\n")
  374. build_log.flush()
  375. except subprocess.TimeoutExpired:
  376. self.logger.error(
  377. f"Build {build_id} timed out after "
  378. f"{CBS_BUILD_TIMEOUT_SEC} seconds."
  379. )
  380. build_log.write(
  381. f"Build timed out after {CBS_BUILD_TIMEOUT_SEC} seconds.\n"
  382. )
  383. build_log.flush()
  384. def shutdown(self) -> None:
  385. """
  386. Request graceful shutdown of the builder.
  387. """
  388. self.logger.info("Shutdown requested")
  389. self.__shutdown_requested = True
  390. def run(self) -> None:
  391. """
  392. Continuously processes builds in the queue until shutdown is requested.
  393. Completes any build that has been popped from the queue before
  394. checking shutdown status.
  395. """
  396. self.logger.info("Builder started and waiting for builds...")
  397. while not self.__shutdown_requested:
  398. build_to_process = bm.get_singleton().get_next_build_id(
  399. timeout=5
  400. )
  401. if build_to_process is None:
  402. # Timeout occurred, no build available
  403. # Loop will check shutdown flag and continue or exit
  404. continue
  405. # We got a build from queue, process it regardless of shutdown
  406. # This ensures we complete any work we've taken responsibility for
  407. self.logger.info(f"Processing build {build_to_process}")
  408. self.__process_build(build_id=build_to_process)
  409. self.logger.info("Builder shutting down gracefully")