Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 7320410c authored by Treehugger Robot's avatar Treehugger Robot Committed by Gerrit Code Review
Browse files

Merge "Prototype of build orchestrator"

parents 55d52209 0daf186a
Loading
Loading
Loading
Loading
+46 −0
Original line number Diff line number Diff line
#!/usr/bin/env python3
import os
import sys
import yaml

from hierarchy import parse_hierarchy


def main():
  """Validate a {target}-{variant} selection and print its target hierarchy.

  Expects exactly one command-line argument of the form {target}-{variant}
  and BUFFET_BUILD_TOP in the environment (set by envsetup.sh). Walks the
  hierarchy from the given target up to its root and prints the chain.

  Exits with status 1 on invalid usage, unknown variant, or missing
  environment; raises RuntimeError for an unknown target.
  """
  if len(sys.argv) != 2:
    print('usage: %s target' % sys.argv[0])
    # sys.exit instead of the site-module exit(); works under -S and frozen.
    sys.exit(1)

  # Split at the first dash only, matching the shell helper buffet()
  # (product=${selection%%-*}), so products containing dashes still parse.
  args = sys.argv[1].split('-', 1)
  if len(args) != 2:
    print('target format: {target}-{variant}')
    sys.exit(1)

  target, variant = args

  if variant not in ['eng', 'user', 'userdebug']:
    print('unknown variant "%s": expected "eng", "user" or "userdebug"' %
          variant)
    sys.exit(1)

  build_top = os.getenv('BUFFET_BUILD_TOP')
  if not build_top:
    print('BUFFET_BUILD_TOP is not set; Did you correctly run envsetup.sh?')
    sys.exit(1)

  hierarchy_map = parse_hierarchy(build_top)

  if target not in hierarchy_map:
    raise RuntimeError(
        "unknown target '%s': couldn't find the target. Supported targets are: %s"
        % (target, list(hierarchy_map.keys())))

  # Follow parent links until a root (parent is None) is reached.
  hierarchy = [target]
  while hierarchy_map[hierarchy[-1]]:
    hierarchy.append(hierarchy_map[hierarchy[-1]])

  print('Target hierarchy for %s: %s' % (target, hierarchy))


if __name__ == '__main__':
  main()
+367 −0
Original line number Diff line number Diff line
#!/usr/bin/env python3
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import collections
import copy
import hierarchy
import json
import logging
import filecmp
import os
import shutil
import subprocess
import sys
import tempfile
import collect_metadata
import utils

# Goals that apply to every component (fanned out by the main loop via
# run_build_all) rather than naming a single component.
BUILD_CMD_TO_ALL = (
  'clean',
  'installclean',
  'update-meta',
)
# Components skipped by run_build_all when a build-all goal is requested.
BUILD_ALL_EXEMPTION = (
  'art',
)

def get_supported_product(ctx, supported_products):
  """Walks up the target hierarchy until a supported product is found.

  Starting from the context's current target product, follows parent links
  in the hierarchy and returns the first product contained in
  supported_products, or None if the walk leaves the hierarchy.
  """
  parents = hierarchy.parse_hierarchy(ctx.build_top())
  candidate = ctx.target_product()

  while candidate not in supported_products:
    candidate = parents.get(candidate)
    if candidate is None:
      return None
  return candidate


def parse_goals(ctx, metadata, goals):
  """Parse goals and returns a map from each component to goals.

    e.g.

    "m main art timezone:foo timezone:bar" will return the following dict: {
        "main": {"all"},
        "art": {"all"},
        "timezone": {"foo", "bar"},
    }
  """
  # Accepted forms: a global build command, {component}, or
  # {component}:{subgoal}.
  parsed = collections.defaultdict(set)

  for goal in goals:
    # Global commands (clean, installclean, ...) apply to all components.
    if goal in BUILD_CMD_TO_ALL:
      parsed['all'].add(goal)
      continue

    if ':' in goal:
      try:
        component, subgoal = goal.split(':')
      except ValueError:
        # More than one ':' — not a valid {component}:{subgoal} form.
        raise RuntimeError(
            'unknown goal: %s: should be {component} or {component}:{subgoal}' %
            goal)
    else:
      component, subgoal = goal, 'all'

    if component not in metadata:
      raise RuntimeError('unknown goal: %s: component %s not found' %
                         (goal, component))
    if not get_supported_product(ctx, metadata[component]['lunch_targets']):
      raise RuntimeError("can't find matching target. Supported targets are: " +
                         str(metadata[component]['lunch_targets']))

    parsed[component].add(subgoal)

  return parsed


def find_cycle(metadata):
  """ Finds a cyclic dependency among components.

  This is for debugging.

  Returns a list of component names forming one cycle, or None if the
  dependency graph is acyclic.
  """
  visited = set()
  parent_node = dict()
  in_stack = set()

  def dfs(node):
    """Depth-first search from node; returns a cycle as a list, or None."""
    visited.add(node)
    in_stack.add(node)

    for child in metadata[node].get('deps', ()):
      if child in in_stack:
        # Back edge found: walk parent links from node back to child to
        # reconstruct the cycle.
        cycle = [node]
        while cycle[-1] != child:
          cycle.append(parent_node[cycle[-1]])
        return cycle
      if child not in visited:
        parent_node[child] = node
        found = dfs(child)
        if found is not None:
          return found

    in_stack.remove(node)
    return None

  for component in metadata:
    if component not in visited:
      found = dfs(component)
      if found is not None:
        return found

  return None


def topological_sort_components(metadata):
  """ Performs topological sort on components.

  If A depends on B, B appears first.

  Raises RuntimeError if the dependency graph contains a cycle.
  """
  # Edges point from a component to its dependencies (A -> B when B is in
  # metadata[A]['deps']), so Kahn's algorithm yields dependents-first order;
  # the final reversal flips it to dependencies-first.
  indegree = collections.defaultdict(int)
  for component, info in metadata.items():
    for dep in info.get('deps', ()):
      indegree[dep] += 1

  ready = collections.deque(c for c in metadata if indegree[c] == 0)

  ordered = []
  while ready:
    component = ready.popleft()
    ordered.append(component)
    for dep in metadata[component].get('deps', ()):
      indegree[dep] -= 1
      if indegree[dep] == 0:
        ready.append(dep)

  # Any component left out of the ordering implies a cycle.
  if len(ordered) != len(metadata):
    cycle = find_cycle(metadata)
    raise RuntimeError('circular dependency found among metadata: %s' % cycle)

  return ordered[::-1]


def add_dependency_goals(ctx, metadata, component, goals):
  """ Adds goals that given component depends on."""
  # Every dependency is currently built with its default goal set ('all').
  # TODO: add detailed goals (e.g. API build rules, library build rules, etc.)
  for dep in metadata[component].get('deps', ()):
    goals[dep].add('all')


def sorted_goals_with_dependencies(ctx, metadata, parsed_goals):
  """ Analyzes the dependency graph among components, adds build commands for

  dependencies, and then sorts the goals.

  Returns a list of tuples: (component_name, set of subgoals).
  Builds should be run in the list's order.
  """
  # TODO(inseob@): after topological sort, some components may be built in
  # parallel.

  order = topological_sort_components(metadata)
  goals = copy.deepcopy(parsed_goals)

  # Visit dependents before their dependencies so dependency goals propagate
  # transitively: building A (A -> B -> C) also schedules C, not just B.
  for component in reversed(order):
    if component in goals:
      add_dependency_goals(ctx, metadata, component, goals)

  # Emit 'all' first, then components in dependencies-first order.
  return [(c, goals[c]) for c in ['all'] + order if c in goals]


def run_build(ctx, metadata, component, subgoals):
  """Runs one component's build command with the given subgoals.

  Sources envsetup.sh, lunches the matching product-variant, and invokes the
  component's build_cmd in its own directory. Raises CalledProcessError if
  the build fails (check=True).
  """
  info = metadata[component]
  build_cmd = info['build_cmd']
  out_dir = info['out_dir']
  default_goals = info.get('default_goals', '')

  # 'all' means "use the component's default goals".
  goal = default_goals if 'all' in subgoals else ' '.join(subgoals)

  build_vars = ''
  if 'update-meta' in subgoals:
    build_vars = 'TARGET_MULTITREE_UPDATE_META=true'

  # TODO(inseob@): shell escape
  cmd = [
      '/bin/bash', '-c',
      'source build/envsetup.sh && lunch %s-%s && %s %s %s' %
      (get_supported_product(ctx, info['lunch_targets']),
       ctx.target_build_variant(), build_vars, build_cmd, goal)
  ]
  logging.debug('cwd: ' + info['path'])
  logging.debug('running build: ' + str(cmd))

  subprocess.run(cmd, cwd=info['path'], check=True)


def run_build_all(ctx, metadata, subgoals):
  """Runs the given subgoals for every non-exempted component."""
  for component in metadata:
    if component not in BUILD_ALL_EXEMPTION:
      run_build(ctx, metadata, component, subgoals)


def find_components(metadata, predicate):
  """Yields each component name in metadata satisfying predicate."""
  yield from (component for component in metadata if predicate(component))


def import_filegroups(metadata, component, exporting_component, target_file_pairs):
  """Imports filegroups exported by another component.

  Generates an Android.bp declaring one filegroup per (name, outpaths) pair
  plus symlinks to each exported output file, staged in a temporary
  directory, and swaps it in as
  <component path>/imported/<exporting_component> — unless the existing
  contents are already identical, in which case nothing is written.

  Args:
    metadata: map from component name to its metadata; 'path' is used for
      both components.
    component: the importing component.
    exporting_component: the component whose outputs are imported.
    target_file_pairs: iterable of (filegroup_name, list of output paths
      relative to the exporting component's path).
  """
  imported_filegroup_dir = os.path.join(metadata[component]['path'], 'imported', exporting_component)

  target_file_pairs = list(target_file_pairs)
  if not target_file_pairs:
    # Nothing to import; leave the filesystem untouched.
    return

  bp_content = ''
  for name, outpaths in target_file_pairs:
    bp_content += ('filegroup {{\n'
                   '    name: "{fname}",\n'
                   '    srcs: [\n'.format(fname=name))
    for outpath in outpaths:
      bp_content += '        "{outfile}",\n'.format(outfile=os.path.basename(outpath))
    bp_content += ('    ],\n'
                   '}\n')

  # Stage once, after all filegroups are accumulated. (Previously this block
  # was nested inside the loop above, rewriting the imported directory with
  # partial contents on every iteration.)
  with tempfile.TemporaryDirectory() as tmp_dir:
    with open(os.path.join(tmp_dir, 'Android.bp'), 'w') as fout:
      fout.write(bp_content)
    for _, outpaths in target_file_pairs:
      for outpath in outpaths:
        os.symlink(os.path.join(metadata[exporting_component]['path'], outpath),
                   os.path.join(tmp_dir, os.path.basename(outpath)))
    # dircmp is lazy, so constructing it against a possibly-missing directory
    # is safe; os.path.exists below short-circuits before any comparison.
    cmp_result = filecmp.dircmp(tmp_dir, imported_filegroup_dir)
    if os.path.exists(imported_filegroup_dir) and len(
        cmp_result.left_only) + len(cmp_result.right_only) + len(
            cmp_result.diff_files) == 0:
      # Files are identical, it doesn't need to be written
      logging.info(
          'imported files exists and the contents are identical: {} -> {}'
          .format(component, exporting_component))
      return
    logging.info('creating symlinks for imported files: {} -> {}'.format(
        component, exporting_component))
    # Ensure parent dirs exist, then replace the leaf with the staged dir.
    os.makedirs(imported_filegroup_dir, exist_ok=True)
    shutil.rmtree(imported_filegroup_dir, ignore_errors=True)
    shutil.move(tmp_dir, imported_filegroup_dir)


def prepare_build(metadata, component):
  """Prepares the component's 'imported' directory before its build runs.

  Imports filegroups from every dependency listed in the component's
  metadata, and removes imported directories left over from earlier runs.
  """
  imported_dir = os.path.join(metadata[component]['path'], 'imported')

  if utils.META_DEPS not in metadata[component]:
    # No dependencies: drop any stale imported directory and stop.
    if os.path.exists(imported_dir):
      logging.debug('remove {}'.format(imported_dir))
      shutil.rmtree(imported_dir)
    return

  imported_components = set()
  for exp_comp, exp_meta in metadata[component][utils.META_DEPS].items():
    if utils.META_FILEGROUP not in exp_meta:
      continue
    filegroups = exp_meta[utils.META_FILEGROUP]
    target_file_pairs = list(filegroups.items())
    import_filegroups(metadata, component, exp_comp, target_file_pairs)
    imported_components.add(exp_comp)

  # Remove directories that are not generated this time.
  if os.path.exists(imported_dir):
    if not imported_components:
      shutil.rmtree(imported_dir)
    else:
      for remove_target in set(os.listdir(imported_dir)) - imported_components:
        logging.info('remove unnecessary imported dir: {}'.format(remove_target))
        shutil.rmtree(os.path.join(imported_dir, remove_target))


def main():
  """Entry point: collects component metadata, resolves the requested goals,
  and runs each component's build in dependency order."""
  utils.set_logging_config(logging.DEBUG)
  ctx = utils.get_build_context()

  logging.info('collecting metadata')

  # NOTE(review): logging is configured twice (DEBUG above, True here);
  # presumably the second call toggles a verbose mode — confirm against
  # utils.set_logging_config's signature.
  utils.set_logging_config(True)

  # Goals come straight from the command line; default to building "main".
  goals = sys.argv[1:]
  if not goals:
    logging.debug('empty goals. defaults to main')
    goals = ['main']

  logging.debug('goals: ' + str(goals))

  # Force update the metadata for the 'update-meta' build
  metadata_collector = collect_metadata.MetadataCollector(
      ctx.components_top(), ctx.out_dir(),
      collect_metadata.COMPONENT_METADATA_DIR,
      collect_metadata.COMPONENT_METADATA_FILE,
      force_update='update-meta' in goals)
  metadata_collector.collect()

  metadata = metadata_collector.get_metadata()
  logging.debug('metadata: ' + str(metadata))

  # Map each component to its requested subgoals, then order components so
  # dependencies build first (see sorted_goals_with_dependencies).
  parsed_goals = parse_goals(ctx, metadata, goals)
  logging.debug('parsed goals: ' + str(parsed_goals))

  sorted_goals = sorted_goals_with_dependencies(ctx, metadata, parsed_goals)
  logging.debug('sorted goals with deps: ' + str(sorted_goals))

  for component, subgoals in sorted_goals:
    if component == 'all':
      # Global goals (e.g. clean, update-meta) fan out to every component.
      run_build_all(ctx, metadata, subgoals)
      continue
    prepare_build(metadata, component)
    run_build(ctx, metadata, component, subgoals)


if __name__ == '__main__':
  main()
+428 −0

File added.

Preview size limit exceeded, changes collapsed.

+48 −0
Original line number Diff line number Diff line
#!/bin/bash

function buffet()
{
    # Select a {product}-{variant} build target and export the BUFFET_*
    # environment consumed by m(). Validates the selection via the python
    # helper before exporting anything.
    local product variant selection
    if [[ $# -ne 1 ]]; then
        echo "usage: buffet [target]" >&2
        return 1
    fi

    selection=$1
    product=${selection%%-*} # Trim everything after first dash
    variant=${selection#*-} # Trim everything up to first dash

    if [ -z "$product" ]
    then
        echo
        echo "Invalid lunch combo: $selection"
        return 1
    fi

    # When the selection contains no dash, ${selection#*-} leaves the string
    # unchanged (so $variant is never empty); test for the separator
    # explicitly so the underscore hint below can actually fire.
    if [[ "$selection" != *-* ]] || [ -z "$variant" ]
    then
        if [[ "$product" =~ .*_(eng|user|userdebug) ]]
        then
            echo "Did you mean -${product/*_/}? (dash instead of underscore)"
        fi
        return 1
    fi

    BUFFET_BUILD_TOP=$(pwd) python3 tools/build/orchestrator/buffet_helper.py $1 || return 1

    export BUFFET_BUILD_TOP=$(pwd)
    export BUFFET_COMPONENTS_TOP=$BUFFET_BUILD_TOP/components
    export BUFFET_TARGET_PRODUCT=$product
    export BUFFET_TARGET_BUILD_VARIANT=$variant
    export BUFFET_TARGET_BUILD_TYPE=release
}

function m()
{
    # Forward the build goals to the orchestrator; requires buffet() to have
    # exported BUFFET_BUILD_TOP first.
    if [ -z "$BUFFET_BUILD_TOP" ]; then
        echo "Run \"buffet [target]\" first"
        return 1
    fi

    python3 $BUFFET_BUILD_TOP/tools/build/orchestrator/build_helper.py "$@"
}
+79 −0
Original line number Diff line number Diff line
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import os
import yaml


def parse_hierarchy(build_top):
  """Parse build hierarchy file from given build top directory, and returns a dict from child targets to parent targets.

  Example of hierarchy file:
  ==========
  aosp_arm64:
  - armv8
  - aosp_cf_arm64_phone

  armv8:
  - aosp_oriole
  - aosp_sunfish

  aosp_oriole:
  - oriole

  aosp_sunfish:
  - sunfish

  oriole:
  # leaf

  sunfish:
  # leaf
  ==========

  If we parse this yaml, we get a dict looking like:

  {
      "sunfish": "aosp_sunfish",
      "oriole": "aosp_oriole",
      "aosp_oriole": "armv8",
      "aosp_sunfish": "armv8",
      "armv8": "aosp_arm64",
      "aosp_cf_arm64_phone": "aosp_arm64",
      "aosp_arm64": None, # no parent
  }
  """
  metadata_path = os.path.join(build_top, 'tools', 'build', 'hierarchy.yaml')
  if not os.path.isfile(metadata_path):
    raise RuntimeError("target metadata file %s doesn't exist" % metadata_path)

  with open(metadata_path, 'r') as f:
    hierarchy_yaml = yaml.load(f, Loader=yaml.SafeLoader)

  # Invert the parent -> [children] mapping into child -> parent; leaves
  # (targets with no children) contribute nothing here.
  hierarchy_map = {
      child: parent
      for parent, children in hierarchy_yaml.items()
      for child in (children or [])
  }

  # Roots — targets that are nobody's child — map to None.
  for parent in hierarchy_yaml:
    hierarchy_map.setdefault(parent, None)

  return hierarchy_map
Loading