Przeglądaj źródła

web: use build manager and builder

Shiv Tyagi 1 rok temu
rodzic
commit
b94e47beaf
1 zmienionych plików z 73 dodań i 438 usunięć:

  1. web/app.py (+73, −438)

@@ -1,20 +1,10 @@
 #!/usr/bin/env python3
 
 import os
-import subprocess
-import json
-import pathlib
-import shutil
-import glob
-import time
-import fcntl
 import base64
-import hashlib
-from distutils.dir_util import copy_tree
-from flask import Flask, render_template, request, send_from_directory, render_template_string, jsonify, redirect
-from threading import Thread, Lock
+from flask import Flask, render_template, request, send_from_directory, jsonify, redirect
+from threading import Thread
 import sys
-import re
 import requests
 
 from logging.config import dictConfig
@@ -39,6 +29,8 @@ dictConfig({
 sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
 import ap_git
 import metadata_manager
+import build_manager
+from builder import Builder
 
 # run at lower priority
 os.nice(20)
@@ -59,16 +51,13 @@ cmd_opts, cmd_args = parser.parse_args()
 basedir = os.path.abspath(cmd_opts.basedir)
 sourcedir = os.path.join(basedir, 'ardupilot')
 outdir_parent = os.path.join(basedir, 'builds')
-tmpdir_parent = os.path.join(basedir, 'tmp')
+workdir_parent = os.path.join(basedir, 'workdir')
 
 appdir = os.path.dirname(__file__)
 
 builds_dict = {}
 REMOTES = None
 
-# LOCKS
-queue_lock = Lock()
-
 repo = ap_git.GitRepo.clone_if_needed(
     source="https://github.com/ardupilot/ardupilot.git",
     dest=sourcedir,
@@ -82,319 +71,33 @@ versions_fetcher = metadata_manager.VersionsFetcher(
     remotes_json_path=os.path.join(basedir, 'configs', 'remotes.json'),
     ap_repo=repo
 )
+
+manager = build_manager.BuildManager(
+    outdir=outdir_parent,
+    redis_host=os.getenv('CBS_REDIS_HOST', default='localhost'),
+    redis_port=os.getenv('CBS_REDIS_PORT', default='6379')
+)
+cleaner = build_manager.BuildArtifactsCleaner()
+progress_updater = build_manager.BuildProgressUpdater()
+
 versions_fetcher.start()
+cleaner.start()
+progress_updater.start()
 
-def remove_directory_recursive(dirname):
-    '''remove a directory recursively'''
-    app.logger.info('Removing directory ' + dirname)
-    if not os.path.exists(dirname):
-        return
-    f = pathlib.Path(dirname)
-    if f.is_file():
-        f.unlink()
-    else:
-        shutil.rmtree(f, True)
-
-
-def create_directory(dir_path):
-    '''create a directory, don't fail if it exists'''
-    app.logger.info('Creating ' + dir_path)
-    pathlib.Path(dir_path).mkdir(parents=True, exist_ok=True)
-
-
-def run_build(task, tmpdir, outdir, logpath):
-    '''run a build with parameters from task'''
-    remove_directory_recursive(tmpdir_parent)
-    create_directory(tmpdir)
-    tmp_src_dir = os.path.join(tmpdir, 'build_src')
-    source_repo = ap_git.GitRepo.shallow_clone_at_commit_from_local(
-        source=sourcedir,
-        remote=task['remote'],
-        commit_ref=task['git_hash_short'],
-        dest=tmp_src_dir
+if os.getenv('CBS_ENABLE_INBUILT_BUILDER', default='1') == '1':
+    builder = Builder(
+        workdir=workdir_parent,
+        source_repo=repo
     )
-    # update submodules in temporary source directory
-    source_repo.submodule_update(init=True, recursive=True, force=True)
-    # checkout to the commit pointing to the requested commit
-    source_repo.checkout_remote_commit_ref(
-        remote=task['remote'],
-        commit_ref=task['git_hash_short'],
-        force=True,
-        hard_reset=True,
-        clean_working_tree=True
+    builder_thread = Thread(
+        target=builder.run,
+        daemon=True
     )
-    if not os.path.isfile(os.path.join(outdir, 'extra_hwdef.dat')):
-        app.logger.error('Build aborted, missing extra_hwdef.dat')
-    app.logger.info('Appending to build.log')
-    with open(logpath, 'a') as log:
-
-        log.write('Setting vehicle to: ' + task['vehicle'].capitalize() + '\n')
-        log.flush()
-        # setup PATH to point at our compiler
-        env = os.environ.copy()
-        bindir1 = os.path.abspath(os.path.join(appdir, "..", "..", "bin"))
-        bindir2 = os.path.abspath(os.path.join(appdir, "..", "..", "gcc", "bin"))
-        cachedir = os.path.abspath(os.path.join(appdir, "..", "..", "cache"))
-
-        env["PATH"] = bindir1 + ":" + bindir2 + ":" + env["PATH"]
-        env['CCACHE_DIR'] = cachedir
-
-        app.logger.info('Running waf configure')
-        log.write('Running waf configure\n')
-        log.flush()
-        subprocess.run(['python3', './waf', 'configure',
-                        '--board', task['board'], 
-                        '--out', tmpdir, 
-                        '--extra-hwdef', task['extra_hwdef']],
-                        cwd = tmp_src_dir,
-                        env=env,
-                        stdout=log, stderr=log, shell=False)
-        app.logger.info('Running clean')
-        log.write('Running clean\n')
-        log.flush()
-        subprocess.run(['python3', './waf', 'clean'],
-                        cwd = tmp_src_dir, 
-                        env=env,
-                        stdout=log, stderr=log, shell=False)
-        app.logger.info('Running build')
-        log.write('Running build\n')
-        log.flush()
-        subprocess.run(['python3', './waf', task['vehicle']],
-                        cwd = tmp_src_dir,
-                        env=env,
-                        stdout=log, stderr=log, shell=False)
-        log.write('done build\n')
-        log.flush()
-
-def sort_json_files(reverse=False):
-    json_files = list(filter(os.path.isfile,
-                             glob.glob(os.path.join(outdir_parent,
-                                                    '*', 'q.json'))))
-    json_files.sort(key=lambda x: os.path.getmtime(x), reverse=reverse)
-    return json_files
-
-def check_queue():
-    '''thread to continuously run queued builds'''
-    queue_lock.acquire()
-    json_files = sort_json_files()
-    queue_lock.release()
-    if len(json_files) == 0:
-        return
-    # remove multiple build requests from same ip address (keep newest)
-    queue_lock.acquire()
-    ip_list = []
-    for f in json_files:
-        file = json.loads(open(f).read())
-        ip_list.append(file['ip'])
-    seen = set()
-    ip_list.reverse()
-    for index, value in enumerate(ip_list):
-        if value in seen:
-            file = json.loads(open(json_files[-index-1]).read())
-            outdir_to_delete = os.path.join(outdir_parent, file['token'])
-            remove_directory_recursive(outdir_to_delete)
-        else:
-            seen.add(value)
-    queue_lock.release()
-    if len(json_files) == 0:
-        return
-    # open oldest q.json file
-    json_files = sort_json_files()
-    taskfile = json_files[0]
-    app.logger.info('Opening ' + taskfile)
-    task = json.loads(open(taskfile).read())
-    app.logger.info('Removing ' + taskfile)
-    os.remove(taskfile)
-    outdir = os.path.join(outdir_parent, task['token'])
-    tmpdir = os.path.join(tmpdir_parent, task['token'])
-    logpath = os.path.abspath(os.path.join(outdir, 'build.log'))
-    app.logger.info("LOGPATH: %s" % logpath)
-    try:
-        # run build and rename build directory
-        app.logger.info('MIR: Running build ' + str(task))
-        run_build(task, tmpdir, outdir, logpath)
-        app.logger.info('Copying build files from %s to %s',
-                        os.path.join(tmpdir, task['board']),
-                            outdir)
-        copy_tree(os.path.join(tmpdir, task['board'], 'bin'), outdir)
-        app.logger.info('Build successful!')
-        remove_directory_recursive(tmpdir)
-
-    except Exception as ex:
-        app.logger.info('Build failed: ', ex)
-        pass
-    open(logpath,'a').write("\nBUILD_FINISHED\n")
-
-def file_age(fname):
-    '''return file age in seconds'''
-    return time.time() - os.stat(fname).st_mtime
-
-def remove_old_builds():
-    '''as a cleanup, remove any builds older than 24H'''
-    for f in os.listdir(outdir_parent):
-        bdir = os.path.join(outdir_parent, f)
-        if os.path.isdir(bdir) and file_age(bdir) > 24 * 60 * 60:
-            remove_directory_recursive(bdir)
-    time.sleep(5)
-
-def queue_thread():
-    while True:
-        try:
-            check_queue()
-            remove_old_builds()
-        except Exception as ex:
-            app.logger.error('Failed queue: ', ex)
-            pass
-
-def get_build_progress(build_id, build_status):
-    '''return build progress on scale of 0 to 100'''
-    if build_status in ['Pending', 'Error']:
-        return 0
-    
-    if build_status == 'Finished':
-        return 100
-    
-    log_file_path = os.path.join(outdir_parent,build_id,'build.log')
-    app.logger.info('Opening ' + log_file_path)
-    build_log = open(log_file_path, encoding='utf-8').read()
-    compiled_regex = re.compile(r'(\[\D*(\d+)\D*\/\D*(\d+)\D*\])')
-    all_matches = compiled_regex.findall(build_log)
-
-    if (len(all_matches) < 1):
-        return 0
-
-    completed_steps, total_steps = all_matches[-1][1:]
-    if (int(total_steps) < 20):
-        # these steps are just little compilation and linking that happen at initialisation
-        # these do not contribute significant percentage to overall build progress
-        return 1
-    
-    if (int(total_steps) < 200):
-        # these steps are for building the OS
-        # we give this phase 4% weight in the whole build progress
-        return (int(completed_steps) * 4 // int(total_steps)) + 1
-    
-    # these steps are the major part of the build process
-    # we give 95% of weight to these
-    return (int(completed_steps) * 95 // int(total_steps)) + 5
-
-
-def get_build_status(build_id):
-    build_id_split = build_id.split(':')
-    if len(build_id_split) < 2:
-        raise Exception('Invalid build id')
-
-    if os.path.exists(os.path.join(outdir_parent,build_id,'q.json')):
-        status = "Pending"
-    elif not os.path.exists(os.path.join(outdir_parent,build_id,'build.log')):
-        status = "Error"
-    else:
-        log_file_path = os.path.join(outdir_parent,build_id,'build.log')
-        app.logger.info('Opening ' + log_file_path)
-        build_log = open(log_file_path, encoding='utf-8').read()
-        if build_log.find("'%s' finished successfully" % build_id_split[0].lower()) != -1:
-            status = "Finished"
-        elif build_log.find('The configuration failed') != -1 or build_log.find('Build failed') != -1 or build_log.find('compilation terminated') != -1:
-            status = "Failed"
-        elif build_log.find('BUILD_FINISHED') == -1:
-            status = "Running"
-        else:
-            status = "Failed"
-    return status
-
-def update_build_dict():
-    '''update the build_dict dictionary which keeps track of status of all builds'''
-    global builds_dict
-    # get list of directories
-    blist = []
-    for b in os.listdir(outdir_parent):
-        if os.path.isdir(os.path.join(outdir_parent,b)):
-            blist.append(b)
-
-    #remove deleted builds from build_dict
-    for build in builds_dict:
-        if build not in blist:
-            builds_dict.pop(build, None)
-
-    for b in blist:
-        build_id_split = b.split(':')
-        if len(build_id_split) < 2:
-            continue
-        build_info = builds_dict.get(b, None)
-        # add an entry for the build in build_dict if not exists
-        if (build_info is None):
-            build_info = {}
-            build_info['vehicle'] = build_id_split[0].capitalize()
-            build_info['board'] = build_id_split[1]
-            feature_file = os.path.join(outdir_parent, b, 'selected_features.json')
-            app.logger.info('Opening ' + feature_file)
-            selected_features_dict = json.loads(open(feature_file).read())
-            selected_features = selected_features_dict['selected_features']
-            build_info['git_hash_short'] = selected_features_dict['git_hash_short']
-            features = ''
-            for feature in selected_features:
-                if features == '':
-                    features = features + feature
-                else:
-                    features = features + ", " + feature
-            build_info['features'] = features
-
-        age_min = int(file_age(os.path.join(outdir_parent,b))/60.0)
-        build_info['age'] = "%u:%02u" % ((age_min // 60), age_min % 60)
-
-        # refresh build status only if it was pending, running or not initialised
-        if (build_info.get('status', None) in ['Pending', 'Running', None]):
-            build_info['status'] = get_build_status(b)
-            build_info['progress'] = get_build_progress(b, build_info['status'])
-
-        # update dictionary entry
-        builds_dict[b] = build_info
-
-    temp_list = sorted(list(builds_dict.items()), key=lambda x: os.path.getmtime(os.path.join(outdir_parent,x[0])), reverse=True)
-    builds_dict = {ele[0] : ele[1]  for ele in temp_list}
-
-def create_status():
-    '''create status.json'''
-    global builds_dict
-    update_build_dict()
-    tmpfile = os.path.join(outdir_parent, "status.tmp")
-    statusfile = os.path.join(outdir_parent, "status.json")
-    json_object = json.dumps(builds_dict)
-    with open(tmpfile, "w") as outfile:
-        outfile.write(json_object)
-    os.replace(tmpfile, statusfile)
-
-def status_thread():
-    while True:
-        try:
-            create_status()
-        except Exception as ex:
-            app.logger.info(ex)
-            pass
-        time.sleep(3)
+    builder_thread.start()
 
 app = Flask(__name__, template_folder='templates')
 
-if not os.path.isdir(outdir_parent):
-    create_directory(outdir_parent)
-
-try:
-    lock_file = open(os.path.join(basedir, "queue.lck"), "w")
-    fcntl.flock(lock_file, fcntl.LOCK_EX | fcntl.LOCK_NB)
-    app.logger.info("Got queue lock")
-    # we only want one set of threads
-    thread = Thread(target=queue_thread, args=())
-    thread.daemon = True
-    thread.start()
-
-    status_thread = Thread(target=status_thread, args=())
-    status_thread.daemon = True
-    status_thread.start()
-except IOError:
-    app.logger.info("No queue lock")
-
 versions_fetcher.reload_remotes_json()
-
 app.logger.info('Python version is: %s' % sys.version)
 
 def get_auth_token():
@@ -426,136 +129,68 @@ def refresh_remotes():
 @app.route('/generate', methods=['GET', 'POST'])
 def generate():
     try:
-        chosen_version = request.form['version']
-        chosen_remote, chosen_commit_reference = chosen_version.split('/', 1)
-        chosen_vehicle = request.form['vehicle']
-        chosen_version_info = versions_fetcher.get_version_info(
-            vehicle=chosen_vehicle,
-            remote=chosen_remote,
-            commit_ref=chosen_commit_reference
+        version = request.form['version']
+        remote_name, commit_ref = version.split('/', 1)
+        remote_info = versions_fetcher.get_remote_info(remote_name)
+
+        if remote_info is None:
+            raise Exception(f"Remote {remote_name} is not whitelisted.")
+
+        vehicle = request.form['vehicle']
+        version_info = versions_fetcher.get_version_info(
+            vehicle=vehicle,
+            remote=remote_name,
+            commit_ref=commit_ref
         )
 
-        if chosen_version_info is None:
+        if version_info is None:
             raise Exception("Commit reference invalid or not listed to be built for given vehicle for remote")
 
-        chosen_board = request.form['board']
+        board = request.form['board']
         boards_at_commit = ap_src_metadata_fetcher.get_boards_at_commit(
-            remote=chosen_remote,
-            commit_ref=chosen_commit_reference
+            remote=remote_name,
+            commit_ref=commit_ref
         )
-        if chosen_board not in boards_at_commit:
+        if board not in boards_at_commit:
             raise Exception("bad board")
 
-        #ToDo - maybe have the if-statement to check if it's changed.
-        build_options = ap_src_metadata_fetcher.get_build_options_at_commit(
-            remote=chosen_remote,
-            commit_ref=chosen_commit_reference
+        all_features = ap_src_metadata_fetcher.get_build_options_at_commit(
+            remote=remote_name,
+            commit_ref=commit_ref
         )
 
-        # fetch features from user input
-        extra_hwdef = []
-        feature_list = []
-        selected_features = []
-        app.logger.info('Fetching features from user input')
-
-        # add all undefs at the start
-        for f in build_options:
-            extra_hwdef.append('undef %s' % f.define)
-
-        for f in build_options:
-            if f.label not in request.form or request.form[f.label] != '1':
-                extra_hwdef.append('define %s 0' % f.define)
-            else:
-                extra_hwdef.append('define %s 1' % f.define)
-                feature_list.append(f.description)
-                selected_features.append(f.label)
-
-        extra_hwdef = '\n'.join(extra_hwdef)
-        spaces = '\n'
-        feature_list = spaces.join(feature_list)
-        selected_features_dict = {}
-        selected_features_dict['selected_features'] = selected_features
-
-        queue_lock.acquire()
-
-        # create extra_hwdef.dat file and obtain md5sum
-        app.logger.info('Creating ' + 
-                        os.path.join(outdir_parent, 'extra_hwdef.dat'))
-        file = open(os.path.join(outdir_parent, 'extra_hwdef.dat'), 'w')
-        app.logger.info('Writing\n' + extra_hwdef)
-        file.write(extra_hwdef)
-        file.close()
-
-        extra_hwdef_md5sum = hashlib.md5(extra_hwdef.encode('utf-8')).hexdigest()
-        app.logger.info('Removing ' +
-                        os.path.join(outdir_parent, 'extra_hwdef.dat'))
-        os.remove(os.path.join(outdir_parent, 'extra_hwdef.dat'))
-
-        new_git_hash = repo.commit_id_for_remote_ref(
-            remote=chosen_remote,
-            commit_ref=chosen_commit_reference
+        chosen_defines = {
+            feature.define
+            for feature in all_features
+            if request.form.get(feature.label) == "1"
+        }
+
+        git_hash = repo.commit_id_for_remote_ref(
+            remote=remote_name,
+            commit_ref=commit_ref
         )
-        git_hash_short = new_git_hash[:10]
-        app.logger.info('Git hash = ' + new_git_hash)
-        selected_features_dict['git_hash_short'] = git_hash_short
-
-        # create directories using concatenated token 
-        # of vehicle, board, git-hash of source, and md5sum of hwdef
-        token = chosen_vehicle.lower() + ':' + chosen_board + ':' + new_git_hash + ':' + extra_hwdef_md5sum
-        app.logger.info('token = ' + token)
-        outdir = os.path.join(outdir_parent, token)
-
-        if os.path.isdir(outdir):
-            app.logger.info('Build already exists')
+
+        build_info = build_manager.BuildInfo(
+            vehicle=vehicle,
+            remote_info=remote_info,
+            git_hash=git_hash,
+            board=board,
+            selected_features=chosen_defines
+        )
+
+        forwarded_for = request.headers.get('X-Forwarded-For', None)
+        if forwarded_for:
+            client_ip = forwarded_for.split(',')[0].strip()
         else:
-            create_directory(outdir)
-            # create build.log
-            build_log_info = ('Vehicle: ' + chosen_vehicle +
-                '\nBoard: ' + chosen_board +
-                '\nRemote: ' + chosen_remote +
-                '\ngit-sha: ' + git_hash_short +
-                '\nVersion: ' + chosen_version_info.release_type + '-' + chosen_version_info.version_number +
-                '\nSelected Features:\n' + feature_list +
-                '\n\nWaiting for build to start...\n\n')
-            app.logger.info('Creating build.log')
-            build_log = open(os.path.join(outdir, 'build.log'), 'w')
-            build_log.write(build_log_info)
-            build_log.close()
-            # create hwdef.dat
-            app.logger.info('Opening ' + 
-                            os.path.join(outdir, 'extra_hwdef.dat'))
-            file = open(os.path.join(outdir, 'extra_hwdef.dat'),'w')
-            app.logger.info('Writing\n' + extra_hwdef)
-            file.write(extra_hwdef)
-            file.close()
-            # fill dictionary of variables and create json file
-            task = {}
-            task['token'] = token
-            task['remote'] = chosen_remote
-            task['git_hash_short'] = git_hash_short
-            task['version'] = chosen_version_info.release_type + '-' + chosen_version_info.version_number
-            task['extra_hwdef'] = os.path.join(outdir, 'extra_hwdef.dat')
-            task['vehicle'] = chosen_vehicle.lower()
-            task['board'] = chosen_board
-            task['ip'] = request.remote_addr
-            app.logger.info('Opening ' + os.path.join(outdir, 'q.json'))
-            jfile = open(os.path.join(outdir, 'q.json'), 'w')
-            app.logger.info('Writing task file to ' + 
-                            os.path.join(outdir, 'q.json'))
-            jfile.write(json.dumps(task, separators=(',\n', ': ')))
-            jfile.close()
-            # create selected_features.dat for status table
-            feature_file = open(os.path.join(outdir, 'selected_features.json'), 'w')
-            app.logger.info('Writing\n' + os.path.join(outdir, 'selected_features.json'))
-            feature_file.write(json.dumps(selected_features_dict))
-            feature_file.close()
-
-        queue_lock.release()
-
-        base_url = request.url_root
-        app.logger.info(base_url)
+            client_ip = request.remote_addr
+
+        build_id = manager.submit_build(
+            build_info=build_info,
+            client_ip=client_ip,
+        )
+
         app.logger.info('Redirecting to /viewlog')
-        return redirect('/viewlog/'+token)
+        return redirect('/viewlog/'+build_id)
 
     except Exception as ex:
         app.logger.error(ex)