# core.py
  1. import logging
  2. import time
  3. import os
  4. import fnmatch
  5. import ap_git
  6. import json
  7. import jsonschema
  8. import redis
  9. import dill
  10. from pathlib import Path
  11. from . import exceptions as ex
  12. from threading import Lock
  13. from utils import TaskRunner
  14. logger = logging.getLogger(__name__)
  15. class APSourceMetadataFetcher:
  16. """
  17. Class to fetch metadata like available boards, features etc.
  18. from the AP source code
  19. """
  20. __singleton = None
  21. def __init__(self, ap_repo: ap_git.GitRepo,
  22. caching_enabled: bool = False,
  23. redis_host: str = 'localhost',
  24. redis_port: str = '6379') -> None:
  25. """
  26. Initializes the APSourceMetadataFetcher instance
  27. with a given repository path.
  28. Parameters:
  29. ap_repo (GitRepo): ArduPilot local git repository containing
  30. the metadata generation scripts.
  31. caching_enabled (bool): Enable caching metadata for each commit to
  32. avoid checking out git repo each time.
  33. redis_host (str): Hostname of the Redis instance to use for caching
  34. metadata.
  35. redis_port (int): Port of the Redis instance to use for caching
  36. metadata
  37. Raises:
  38. TooManyInstancesError: If an instance of this class already exists,
  39. enforcing a singleton pattern.
  40. """
  41. # Enforce singleton pattern by raising an error if
  42. # an instance already exists.
  43. if APSourceMetadataFetcher.__singleton:
  44. raise ex.TooManyInstancesError()
  45. self.repo = ap_repo
  46. self.caching_enabled = caching_enabled
  47. if self.caching_enabled:
  48. self.__redis_client = redis.Redis(
  49. host=redis_host,
  50. port=redis_port,
  51. decode_responses=False,
  52. )
  53. logger.info(
  54. f"Redis connection established with {redis_host}:{redis_port}"
  55. )
  56. self.__boards_key_prefix = "boards-"
  57. self.__build_options_key_prefix = "bopts-"
  58. APSourceMetadataFetcher.__singleton = self
  59. def __boards_key(self, commit_id: str) -> str:
  60. """
  61. Generate the Redis key that stores the boards list for a given commit.
  62. Parameters:
  63. commit_id (str): The git sha for the commit.
  64. Returns:
  65. str: The Redis key containing the cached board list.
  66. """
  67. return self.__boards_key_prefix + f"{commit_id}"
  68. def __build_options_key(self, commit_id: str) -> str:
  69. """
  70. Generate the Redis key that stores the build options list for a given
  71. commit.
  72. Parameters:
  73. commit_id (str): The git sha for the commit.
  74. Returns:
  75. str: The Redis key containing the cached build options list.
  76. """
  77. return self.__build_options_key_prefix + f"{commit_id}"
  78. def __cache_boards_at_commit(self,
  79. boards: list,
  80. commit_id: str,
  81. ttl_sec: int = 86400) -> None:
  82. """
  83. Cache the given list of boards for a particular commit.
  84. Parameters:
  85. boards (list): The list of boards.
  86. commit_id (str): The git sha for the commit.
  87. ttl_sec (int): Time-to-live (TTL) in seconds after which the
  88. cached list expires.
  89. Raises:
  90. RuntimeError: If the method is called when caching is disabled.
  91. """
  92. if not self.caching_enabled:
  93. raise RuntimeError("Should not be called with caching disabled.")
  94. key = self.__boards_key(commit_id=commit_id)
  95. logger.debug(
  96. "Caching boards list "
  97. f"Redis key: {key}, "
  98. f"Boards: {boards}, "
  99. f"TTL: {ttl_sec} sec"
  100. )
  101. self.__redis_client.set(
  102. name=key,
  103. value=dill.dumps(boards),
  104. ex=ttl_sec
  105. )
  106. def __cache_build_options_at_commit(self,
  107. build_options: list,
  108. commit_id: str,
  109. ttl_sec: int = 86400) -> None:
  110. """
  111. Cache the given list of build options for a particular commit.
  112. Parameters:
  113. build_options (list): The list of build options.
  114. commit_id (str): The git sha for the commit.
  115. ttl_sec (int): Time-to-live (TTL) in seconds after which the
  116. cached list expires.
  117. Raises:
  118. RuntimeError: If the method is called when caching is disabled.
  119. """
  120. if not self.caching_enabled:
  121. raise RuntimeError("Should not be called with caching disabled.")
  122. key = self.__build_options_key(commit_id=commit_id)
  123. logger.debug(
  124. "Caching build options "
  125. f"Redis key: {key}, "
  126. f"Build Options: {build_options}, "
  127. f"TTL: {ttl_sec} sec"
  128. )
  129. self.__redis_client.set(
  130. name=key,
  131. value=dill.dumps(build_options),
  132. ex=ttl_sec
  133. )
  134. def __get_build_options_at_commit_from_cache(self,
  135. commit_id: str) -> list:
  136. """
  137. Retrieves a list of build options available at a specified commit
  138. from cache if exists, None otherwise.
  139. Parameters:
  140. commit_id (str): The commit id to get build options for.
  141. Returns:
  142. list: A list of build options available at the specified commit.
  143. Raises:
  144. RuntimeError: If the method is called when caching is disabled.
  145. """
  146. if not self.caching_enabled:
  147. raise RuntimeError("Should not be called with caching disabled.")
  148. key = self.__build_options_key(commit_id=commit_id)
  149. logger.debug(
  150. f"Getting cached build options for commit id {commit_id}, "
  151. f"Redis Key: {key}"
  152. )
  153. value = self.__redis_client.get(key)
  154. logger.debug(f"Got value {value} at key {key}")
  155. return dill.loads(value) if value else None
  156. def __get_boards_at_commit_from_cache(self, commit_id: str) -> list:
  157. """
  158. Returns the list of boards for a given commit from cache if exists,
  159. None otherwise.
  160. Parameters:
  161. commit_id (str): The commit id to get boards list for.
  162. Returns:
  163. list: A list of boards available at the specified commit.
  164. Raises:
  165. RuntimeError: If the method is called when caching is disabled.
  166. """
  167. if not self.caching_enabled:
  168. raise RuntimeError("Should not be called with caching disabled.")
  169. key = self.__boards_key(commit_id=commit_id)
  170. logger.debug(
  171. f"Getting cached boards list for commit id {commit_id}, "
  172. f"Redis Key: {key}"
  173. )
  174. value = self.__redis_client.get(key)
  175. logger.debug(f"Got value {value} at key {key}")
  176. return dill.loads(value) if value else None
  177. def __get_boards_at_commit_from_repo(self, remote: str,
  178. commit_ref: str) -> list:
  179. """
  180. Returns the list of boards for a given commit from the git repo.
  181. Parameters:
  182. remote (str): The name of the remote repository.
  183. commit_ref (str): The commit reference to check out.
  184. Returns:
  185. list: A list of boards available at the specified commit.
  186. """
  187. with self.repo.get_checkout_lock():
  188. self.repo.checkout_remote_commit_ref(
  189. remote=remote,
  190. commit_ref=commit_ref,
  191. force=True,
  192. hard_reset=True,
  193. clean_working_tree=True
  194. )
  195. import importlib.util
  196. spec = importlib.util.spec_from_file_location(
  197. name="board_list.py",
  198. location=os.path.join(
  199. self.repo.get_local_path(),
  200. 'Tools', 'scripts',
  201. 'board_list.py')
  202. )
  203. mod = importlib.util.module_from_spec(spec)
  204. spec.loader.exec_module(mod)
  205. all_boards = mod.AUTOBUILD_BOARDS
  206. exclude_patterns = ['fmuv*', 'SITL*']
  207. boards = []
  208. for b in all_boards:
  209. excluded = False
  210. for p in exclude_patterns:
  211. if fnmatch.fnmatch(b.lower(), p.lower()):
  212. excluded = True
  213. break
  214. if not excluded:
  215. boards.append(b)
  216. boards.sort()
  217. return boards
  218. def __get_build_options_at_commit_from_repo(self, remote: str,
  219. commit_ref: str) -> tuple:
  220. """
  221. Returns the list of build options for a given commit from the git repo.
  222. Parameters:
  223. remote (str): The name of the remote repository.
  224. commit_ref (str): The commit reference to check out.
  225. Returns:
  226. list: A list of build options available at the specified commit.
  227. """
  228. with self.repo.get_checkout_lock():
  229. self.repo.checkout_remote_commit_ref(
  230. remote=remote,
  231. commit_ref=commit_ref,
  232. force=True,
  233. hard_reset=True,
  234. clean_working_tree=True
  235. )
  236. import importlib.util
  237. spec = importlib.util.spec_from_file_location(
  238. name="build_options.py",
  239. location=os.path.join(
  240. self.repo.get_local_path(),
  241. 'Tools',
  242. 'scripts',
  243. 'build_options.py'
  244. )
  245. )
  246. mod = importlib.util.module_from_spec(spec)
  247. spec.loader.exec_module(mod)
  248. build_options = mod.BUILD_OPTIONS
  249. return build_options
  250. def get_boards_at_commit(self, remote: str,
  251. commit_ref: str) -> list:
  252. """
  253. Retrieves a list of boards available for building at a
  254. specified commit and returns the list.
  255. If caching is enabled, this would first look in the cache for
  256. the list. In case of a cache miss, it would retrive the list
  257. by checkout out the git repo and running `board_list.py` and
  258. cache it.
  259. Parameters:
  260. remote (str): The name of the remote repository.
  261. commit_ref (str): The commit reference to check out.
  262. Returns:
  263. list: A list of boards available at the specified commit.
  264. """
  265. tstart = time.time()
  266. if not self.caching_enabled:
  267. boards = self.__get_boards_at_commit_from_repo(
  268. remote=remote,
  269. commit_ref=commit_ref,
  270. )
  271. logger.debug(
  272. f"Took {(time.time() - tstart)} seconds to get boards"
  273. )
  274. return boards
  275. commid_id = self.repo.commit_id_for_remote_ref(
  276. remote=remote,
  277. commit_ref=commit_ref,
  278. )
  279. logger.debug(f"Fetching boards for commit {commid_id}.")
  280. cached_boards = self.__get_boards_at_commit_from_cache(
  281. commit_id=commid_id
  282. )
  283. if cached_boards:
  284. boards = cached_boards
  285. else:
  286. logger.debug(
  287. "Cache miss. Fetching boards from repo for "
  288. f"commit {commid_id}."
  289. )
  290. boards = self.__get_boards_at_commit_from_repo(
  291. remote=remote,
  292. commit_ref=commid_id,
  293. )
  294. self.__cache_boards_at_commit(
  295. boards=boards,
  296. commit_id=commid_id,
  297. )
  298. logger.debug(
  299. f"Took {(time.time() - tstart)} seconds to get boards"
  300. )
  301. return boards
  302. def get_build_options_at_commit(self, remote: str,
  303. commit_ref: str) -> list:
  304. """
  305. Retrieves a list of build options available at a specified commit.
  306. If caching is enabled, this would first look in the cache for
  307. the list. In case of a cache miss, it would retrive the list
  308. by checkout out the git repo and running `build_options.py` and
  309. cache it.
  310. Parameters:
  311. remote (str): The name of the remote repository.
  312. commit_ref (str): The commit reference to check out.
  313. Returns:
  314. list: A list of build options available at the specified commit.
  315. """
  316. tstart = time.time()
  317. if not self.caching_enabled:
  318. build_options = self.__get_build_options_at_commit_from_repo(
  319. remote=remote,
  320. commit_ref=commit_ref,
  321. )
  322. logger.debug(
  323. f"Took {(time.time() - tstart)} seconds to get build options"
  324. )
  325. return build_options
  326. commid_id = self.repo.commit_id_for_remote_ref(
  327. remote=remote,
  328. commit_ref=commit_ref,
  329. )
  330. logger.debug(f"Fetching build options for commit {commid_id}.")
  331. cached_build_options = self.__get_build_options_at_commit_from_cache(
  332. commit_id=commid_id
  333. )
  334. if cached_build_options:
  335. build_options = cached_build_options
  336. else:
  337. logger.debug(
  338. "Cache miss. Fetching build options from repo for "
  339. f"commit {commid_id}."
  340. )
  341. build_options = self.__get_build_options_at_commit_from_repo(
  342. remote=remote,
  343. commit_ref=commid_id,
  344. )
  345. self.__cache_build_options_at_commit(
  346. build_options=build_options,
  347. commit_id=commid_id,
  348. )
  349. logger.debug(
  350. f"Took {(time.time() - tstart)} seconds to get build options"
  351. )
  352. return build_options
  353. @staticmethod
  354. def get_singleton():
  355. return APSourceMetadataFetcher.__singleton
  356. class VersionInfo:
  357. """
  358. Class to wrap version info properties inside a single object
  359. """
  360. def __init__(self,
  361. remote: str,
  362. commit_ref: str,
  363. release_type: str,
  364. version_number: str,
  365. ap_build_artifacts_url) -> None:
  366. self.remote = remote
  367. self.commit_ref = commit_ref
  368. self.release_type = release_type
  369. self.version_number = version_number
  370. self.ap_build_artifacts_url = ap_build_artifacts_url
  371. class RemoteInfo:
  372. """
  373. Class to wrap remote info properties inside a single object
  374. """
  375. def __init__(self,
  376. name: str,
  377. url: str) -> None:
  378. self.name = name
  379. self.url = url
  380. class VersionsFetcher:
  381. """
  382. Class to fetch the version-to-build metadata from remotes.json
  383. and provide methods to view the same
  384. """
  385. __singleton = None
  386. def __init__(self, remotes_json_path: str,
  387. ap_repo: ap_git.GitRepo):
  388. """
  389. Initializes the VersionsFetcher instance
  390. with a given remotes.json path.
  391. Parameters:
  392. remotes_json_path (str): Path to the remotes.json file.
  393. ap_repo (GitRepo): ArduPilot local git repository. This local
  394. repository is shared between the VersionsFetcher
  395. and the APSourceMetadataFetcher.
  396. Raises:
  397. TooManyInstancesError: If an instance of this class already exists,
  398. enforcing a singleton pattern.
  399. """
  400. # Enforce singleton pattern by raising an error if
  401. # an instance already exists.
  402. if VersionsFetcher.__singleton:
  403. raise ex.TooManyInstancesError()
  404. self.__remotes_json_path = remotes_json_path
  405. self.__ensure_remotes_json()
  406. self.__access_lock_versions_metadata = Lock()
  407. self.__versions_metadata = []
  408. tasks = (
  409. (self.fetch_ap_releases, 1200),
  410. (self.fetch_whitelisted_tags, 1200),
  411. )
  412. self.__task__runner = TaskRunner(tasks=tasks)
  413. self.repo = ap_repo
  414. VersionsFetcher.__singleton = self
  415. def start(self) -> None:
  416. """
  417. Start auto-fetch jobs.
  418. """
  419. logger.info("Starting VersionsFetcher background auto-fetch jobs.")
  420. self.__task__runner.start()
  421. def get_all_remotes_info(self) -> list[RemoteInfo]:
  422. """
  423. Return the list of RemoteInfo objects constructed from the
  424. information in the remotes.json file
  425. Returns:
  426. list: RemoteInfo objects for all remotes mentioned in remotes.json
  427. """
  428. return [
  429. RemoteInfo(
  430. name=remote.get('name', None),
  431. url=remote.get('url', None)
  432. )
  433. for remote in self.__get_versions_metadata()
  434. ]
  435. def get_remote_info(self, remote_name: str) -> RemoteInfo:
  436. """
  437. Return the RemoteInfo for the given remote name, None otherwise.
  438. Returns:
  439. RemoteInfo: The remote information object.
  440. """
  441. return next(
  442. (
  443. remote for remote in self.get_all_remotes_info()
  444. if remote.name == remote_name
  445. ),
  446. None
  447. )
  448. def get_versions_for_vehicle(self, vehicle_name: str) -> list[VersionInfo]:
  449. """
  450. Return the list of dictionaries containing the info about the
  451. versions listed to be built for a particular vehicle.
  452. Parameters:
  453. vehicle_name (str): the vehicle to fetch versions list for
  454. Returns:
  455. list: VersionInfo objects for all versions allowed to be
  456. built for the said vehicle.
  457. """
  458. if vehicle_name is None:
  459. raise ValueError("Vehicle is a required parameter.")
  460. versions_list = []
  461. for remote in self.__get_versions_metadata():
  462. for vehicle in remote['vehicles']:
  463. if vehicle['name'] != vehicle_name:
  464. continue
  465. for release in vehicle['releases']:
  466. versions_list.append(VersionInfo(
  467. remote=remote.get('name', None),
  468. commit_ref=release.get('commit_reference', None),
  469. release_type=release.get('release_type', None),
  470. version_number=release.get('version_number', None),
  471. ap_build_artifacts_url=release.get(
  472. 'ap_build_artifacts_url',
  473. None
  474. )
  475. ))
  476. return versions_list
  477. def get_all_vehicles_sorted_uniq(self) -> list[str]:
  478. """
  479. Return a sorted list of all vehicles listed in remotes.json structure
  480. Returns:
  481. list: Vehicles listed in remotes.json
  482. """
  483. vehicles_set = set()
  484. for remote in self.__get_versions_metadata():
  485. for vehicle in remote['vehicles']:
  486. vehicles_set.add(vehicle['name'])
  487. return sorted(list(vehicles_set))
  488. def is_version_listed(self, vehicle: str, remote: str,
  489. commit_ref: str) -> bool:
  490. """
  491. Check if a version with given properties mentioned in remotes.json
  492. Parameters:
  493. vehicle (str): vehicle for which version is listed
  494. remote (str): remote under which the version is listed
  495. commit_ref(str): commit reference for the version
  496. Returns:
  497. bool: True if the said version is mentioned in remotes.json,
  498. False otherwise
  499. """
  500. if vehicle is None:
  501. raise ValueError("Vehicle is a required parameter.")
  502. if remote is None:
  503. raise ValueError("Remote is a required parameter.")
  504. if commit_ref is None:
  505. raise ValueError("Commit reference is a required parameter.")
  506. return (remote, commit_ref) in [
  507. (version_info.remote, version_info.commit_ref)
  508. for version_info in
  509. self.get_versions_for_vehicle(vehicle_name=vehicle)
  510. ]
  511. def get_version_info(self, vehicle: str, remote: str,
  512. commit_ref: str) -> VersionInfo:
  513. """
  514. Find first version matching the given properties in remotes.json
  515. Parameters:
  516. vehicle (str): vehicle for which version is listed
  517. remote (str): remote under which the version is listed
  518. commit_ref(str): commit reference for the version
  519. Returns:
  520. VersionInfo: Object for the version matching the properties,
  521. None if not found
  522. """
  523. return next(
  524. (
  525. version
  526. for version in self.get_versions_for_vehicle(
  527. vehicle_name=vehicle
  528. )
  529. if version.remote == remote and
  530. version.commit_ref == commit_ref
  531. ),
  532. None
  533. )
  534. def reload_remotes_json(self) -> None:
  535. """
  536. Read remotes.json, validate its structure against the schema
  537. and cache it in memory
  538. """
  539. # load file containing vehicles listed to be built for each
  540. # remote along with the branches/tags/commits on which the
  541. # firmware can be built
  542. remotes_json_schema_path = os.path.join(
  543. os.path.dirname(__file__),
  544. 'remotes.schema.json'
  545. )
  546. with open(self.__remotes_json_path, 'r') as f, \
  547. open(remotes_json_schema_path, 'r') as s:
  548. f_content = f.read()
  549. # Early return if file is empty
  550. if not f_content:
  551. return
  552. versions_metadata = json.loads(f_content)
  553. schema = json.loads(s.read())
  554. # validate schema
  555. jsonschema.validate(instance=versions_metadata, schema=schema)
  556. self.__set_versions_metadata(versions_metadata=versions_metadata)
  557. # update git repo with latest remotes list
  558. self.__sync_remotes_with_ap_repo()
  559. def __ensure_remotes_json(self) -> None:
  560. """
  561. Ensures remotes.json exists and is a valid JSON file.
  562. """
  563. p = Path(self.__remotes_json_path)
  564. if not p.exists():
  565. # Ensure parent directory exists
  566. Path.mkdir(p.parent, parents=True, exist_ok=True)
  567. # write empty json list
  568. with open(p, 'w') as f:
  569. f.write('[]')
  570. def __set_versions_metadata(self, versions_metadata: list) -> None:
  571. """
  572. Set versions metadata property with the one passed as parameter
  573. This requires to acquire the access lock to avoid overwriting the
  574. object while it is being read
  575. """
  576. if versions_metadata is None:
  577. raise ValueError("versions_metadata is a required parameter. "
  578. "Cannot be None.")
  579. with self.__access_lock_versions_metadata:
  580. self.__versions_metadata = versions_metadata
  581. def __get_versions_metadata(self) -> list:
  582. """
  583. Read versions metadata property
  584. This requires to acquire the access lock to avoid reading the list
  585. while it is being modified
  586. Returns:
  587. list: the versions metadata list
  588. """
  589. with self.__access_lock_versions_metadata:
  590. return self.__versions_metadata
  591. def __sync_remotes_with_ap_repo(self):
  592. """
  593. Update the remotes in ArduPilot local repository with the latest
  594. remotes list.
  595. """
  596. remotes = tuple(
  597. (remote.name, remote.url)
  598. for remote in self.get_all_remotes_info()
  599. )
  600. self.repo.remote_add_bulk(remotes=remotes, force=True)
  601. def fetch_ap_releases(self) -> None:
  602. """
  603. Execute the fetch_releases.py script to update remotes.json
  604. with Ardupilot's official releases
  605. """
  606. from scripts import fetch_releases
  607. fetch_releases.run(
  608. base_dir=os.path.join(
  609. os.path.dirname(self.__remotes_json_path),
  610. '..',
  611. ),
  612. remote_name="ardupilot",
  613. )
  614. self.reload_remotes_json()
  615. return
  616. def fetch_whitelisted_tags(self) -> None:
  617. """
  618. Execute the fetch_whitelisted_tags.py script to update
  619. remotes.json with tags from whitelisted repos
  620. """
  621. from scripts import fetch_whitelisted_tags
  622. fetch_whitelisted_tags.run(
  623. base_dir=os.path.join(
  624. os.path.dirname(self.__remotes_json_path),
  625. '..',
  626. )
  627. )
  628. self.reload_remotes_json()
  629. return
  630. @staticmethod
  631. def get_singleton():
  632. return VersionsFetcher.__singleton