[meta-freescale] [Documentation][PATCH 03/14] scripts: add scripts to generate release-notes data out of bitbake's cache metadata

Mario Domenech Goulart <mario@ossystems.com.br>
Wed Apr 30 05:56:38 PDT 2014


Signed-off-by: Mario Domenech Goulart <mario@ossystems.com.br>
---
 release-notes/README                |   39 +++
 scripts/bitbake-metadata2doc.py     |  504 +++++++++++++++++++++++++++++++++++
 scripts/bitbake-metadata2doc.sh     |  105 ++++++++
 scripts/extract-bitbake-metadata.py |  329 +++++++++++++++++++++++
 4 files changed, 977 insertions(+)
 create mode 100644 release-notes/README
 create mode 100644 scripts/bitbake-metadata2doc.py
 create mode 100755 scripts/bitbake-metadata2doc.sh
 create mode 100644 scripts/extract-bitbake-metadata.py

diff --git a/release-notes/README b/release-notes/README
new file mode 100644
index 0000000..ecc3374
--- /dev/null
+++ b/release-notes/README
@@ -0,0 +1,39 @@
+The data used by the Release Notes document is partially generated by
+scripts.
+
+The scripts are in ../scripts.  bitbake-metadata2doc.sh is the main
+script.
+
+bitbake-metadata2doc.sh relies on the existence of the following
+directories:
+
+* the BSP directory: the directory where you initialized the
+  fsl-community-bsp repository
+  (https://github.com/Freescale/fsl-community-bsp-platform) with
+  `repo'
+
+* the gitdm directory: a clone of the
+  https://github.com/OSSystems/gitdm repository (to generate
+  statistics for the Acknowledgements section)
+
+* the Documentation directory: this very repository.  Note that
+  bitbake-metadata2doc.sh assumes Documentation to be found in
+  $BSPDIR/sources
+
+To run bitbake-metadata2doc.sh, change to the scripts directory and
+run it with the BSP directory, the gitdm repository directory, the
+start tag and the end tag (to gather commit statistics).  Here's an
+example:
+
+  $ cd ../scripts
+  $ bitbake-metadata2doc.sh ~/src/fsl-community-bsp ~/src/gitdm 1.5 1.6
+
+By default, bitbake-metadata2doc.sh collects data out of bitbake's
+metadata for all machines it can find in the meta-fsl-arm and
+meta-fsl-arm-extra layers.  You can restrict the machines to collect
+data from by setting the MACHINES variable in your environment.
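+
+For instance (the machine names below are just examples):
+
+  $ MACHINES="imx6qsabresd imx28evk" bitbake-metadata2doc.sh \
+        ~/src/fsl-community-bsp ~/src/gitdm 1.5 1.6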
+
+bitbake-metadata2doc.sh will write its output to the release-notes
+`source' directory.
+
+To generate the formatted output for the release notes document, you
+need to run "make <target>" from the release-notes directory.  It will
+then use the data generated by bitbake-metadata2doc.sh.
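+
+For example, assuming the Makefile provides the usual Sphinx targets:
+
+  $ cd ../release-notes
+  $ make html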
diff --git a/scripts/bitbake-metadata2doc.py b/scripts/bitbake-metadata2doc.py
new file mode 100644
index 0000000..0526985
--- /dev/null
+++ b/scripts/bitbake-metadata2doc.py
@@ -0,0 +1,504 @@
+#! /usr/bin/env python
+# -*- encoding: utf-8 -*-
+
+import os
+import re
+import sys
+import pickle
+import subprocess
+import copy
+import shutil
+
+def info(fmt, *args):
+    print(fmt % args)
+
+def warn(fmt, *args):
+    sys.stderr.write(('WARNING: ' + fmt + '\n') % args)
+
+def error(fmt, *args):
+    sys.stderr.write(('ERROR: ' + fmt + '\n') % args)
+
+def tabularize(lines, spacing=2):
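+    ## Format `lines' (a list of rows, the first row being the
+    ## header) as a reST "simple table".  E.g.,
+    ## [['A', 'B'], ['1', '2']] is formatted as:
+    ##
+    ##   =  =
+    ##   A  B
+    ##   =  =
+    ##   1  2
+    ##   =  =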
+    def format_border(widths):
+        return spc.join([ '=' * width for width in widths ])
+
+    def format_header(header, widths, spc):
+        border = format_border(widths)
+        header = spc.join(map(lambda col, width: col.ljust(width),
+                              header, widths))
+        return '\n'.join([border, header, border])
+
+    def sort_by_col(lines, col):
+        return sorted(lines, key=lambda l: l[col])
+
+    def format_body(lines, widths, spc):
+        def format_line (line):
+            return spc.join(map(lambda col, width: col.ljust(width),
+                                line, widths))
+        return "\n".join(map(format_line, sort_by_col(lines, 0)))
+
+    spc = ' ' * spacing
+    if lines:
+        col_widths = [ max(len(cell) for cell in col)
+                       for col in zip(*lines) ]
+        return '\n'.join([format_header(lines[0], col_widths, spc),
+                          format_body(lines[1:], col_widths, spc),
+                          format_border(col_widths)]) + \
+               '\n'
+    else:
+        return ""
+
+def describe(items):
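+    ## Render (name, description) pairs as a reST bullet list.  E.g.,
+    ## ('u-boot', 'U-Boot bootloader') is rendered as:
+    ##
+    ##   * **u-boot**: U-Boot bootloader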
+    text = ''
+    for item in sorted(items):
+        text += ''.join(['* ', '**', item[0], '**: ', item[1], '\n'])
+    return text
+
+def is_in_soc_family(soc, soc_family):
+    return soc in soc_family.split(':')
+
+def is_compatible_machine(soc_family, compatible_machine_re):
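+    ## COMPATIBLE_MACHINE is a regular expression.  A machine is
+    ## considered compatible if any component of its SOC_FAMILY (a
+    ## colon-separated list) matches it.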
+    if compatible_machine_re:
+        socs = soc_family.split(':')
+        compatible_machine_pattern = re.compile(compatible_machine_re)
+        for soc in socs:
+            if compatible_machine_pattern.match(soc):
+                return True
+        return False
+    else:
+        return True
+
+def format_version(version):
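+    ## Normalize a version string for display.  E.g.,
+    ## '3.10.17+gitAUTOINC+1234abcd-r0' becomes '3.10.17+git', and
+    ## '1:2013.10' becomes '2013.10' (the epoch prefix is dropped).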
+    version = str(version)
+    if 'gitAUTOINC' in version:
+        version_pattern = re.compile('(.*)gitAUTOINC.*')
+        version_number = version_pattern.match(version).groups()[0]
+        return version_number + 'git'
+    else:
+        ## remove the epoch (<e>) in case versions are in the <e>:<version> format
+        epoch_prefix = re.compile('\\d+:(.*)')
+        match = epoch_prefix.match(version)
+        if match:
+            return match.groups()[0]
+        else:
+            return version
+
+def write_inc_file(out_dir, file, text):
+    out_file = os.path.join(out_dir, file)
+    info('Writing %s' % out_file)
+    out_fd = open(out_file, 'w')
+    out_fd.write(text)
+    out_fd.close()
+
+def write_tabular(out_dir, file, header, body):
+    write_inc_file(out_dir, file, tabularize([header] + body))
+
+def write_table_by_recipe(out_dir, file, recipe, header, data):
+    body = []
+    for board in data.keys():
+        recipe_data = data[board]['recipes'][recipe]
+        version = format_version(recipe_data['version'])
+        body += [[board, recipe_data['recipe'], version]]
+    write_tabular(out_dir, file, header, body)
+
+def write_linux_default(data, out_dir):
+    write_table_by_recipe(out_dir,
+                          'linux-default.inc',
+                          'virtual/kernel',
+                          ['Board', 'Kernel Provider', 'Kernel Version'],
+                          data)
+
+
+def write_bootloader_default(data, out_dir):
+    boards_bootloaders = {}
+    for board, board_data in data.items():
+        image_bootloader = board_data['image-bootloader']
+        if image_bootloader:
+            boards_bootloaders[board] = (image_bootloader,
+                                         board_data['recipes'][image_bootloader]['version'])
+        elif 'u-boot' in board_data['recipes']:
+            bootloader = board_data['recipes']['u-boot']
+            boards_bootloaders[board] = (bootloader['recipe'], bootloader['version'])
+        elif 'virtual/bootloader' in board_data['recipes']:
+            bootloader = board_data['recipes']['virtual/bootloader']
+            boards_bootloaders[board] = (bootloader['recipe'], bootloader['version'])
+        else:
+            error('No bootloader for %s' % (board,))
+            sys.exit(1)
+
+    body = []
+    for board, bootloader in boards_bootloaders.items():
+        body.append([board, bootloader[0], format_version(bootloader[1])])
+    write_tabular(out_dir,
+                  'bootloader-default.inc',
+                  ['Board', 'Bootloader', 'Bootloader version'],
+                  body)
+
+def write_fsl_community_bsp_supported_kernels(data, out_dir):
+    kernels = []
+    kernel_recipes = [] # just to keep track of recipes already collected
+    for board, board_data in data.items():
+        kernel = board_data['recipes']['virtual/kernel']
+        recipe = kernel['recipe']
+        recipe_file = kernel['file']
+        if (('/sources/meta-fsl-arm/' in recipe_file) or \
+                ('/sources/meta-fsl-arm-extra/' in recipe_file)) and \
+                recipe not in kernel_recipes:
+            kernels += [[recipe, kernel['description']]]
+            kernel_recipes.append(recipe)
+    write_inc_file(out_dir, 'fsl-community-bsp-supported-kernels.inc', describe(kernels))
+
+def write_fsl_community_bsp_supported_bootloaders_descr(data, out_dir):
+    bootloaders = []
+    bootloader_recipes = [] # just to keep track of recipes already collected
+    for board, board_data in data.items():
+        for bootloader_software in ['u-boot', 'barebox']:
+            if bootloader_software in board_data['recipes']:
+                bootloader = board_data['recipes'][bootloader_software]
+                recipe = bootloader['recipe']
+                recipe_file = bootloader['file']
+                if (('/sources/meta-fsl-arm/' in recipe_file) or \
+                        ('/sources/meta-fsl-arm-extra/' in recipe_file)) and \
+                        recipe not in bootloader_recipes:
+                    bootloaders += [[recipe, bootloader['description']]]
+                    bootloader_recipes.append(recipe)
+    write_inc_file(out_dir, 'fsl-community-bsp-supported-bootloaders-descr.inc', describe(bootloaders))
+
+def write_userspace_pkg(data, out_dir):
+    pkgs = {'gstreamer': [],
+            'libdrm': [],
+            'udev': []}
+    for board, board_data in data.items():
+        for pkg in pkgs.keys():
+            versions = pkgs[pkg]
+            version = board_data['recipes'][pkg]['version']
+            if version not in versions:
+                pkgs[pkg].append(version)
+
+    ## Check if all the versions are the same for each package
+    multiple_versions = []
+    for pkg, versions in pkgs.items():
+        if len(versions) > 1:
+            multiple_versions.append((pkg, versions))
+    for pkg, vs in multiple_versions:
+        error('multiple versions have been found for %s: %s' % (pkg, ', '.join(map(str, vs))))
+    if multiple_versions:
+        sys.exit(1)
+
+    ## Check if packages are available for all SoCs:
+    pkg_board_restriction = False
+    for pkg in pkgs:
+        for board_data in data.values():
+            compatible_machine = board_data['recipes'][pkg]['compatible-machine']
+            if compatible_machine:
+                pkg_board_restriction = True
+                error('Package %s has restrictions with regard to boards: COMPATIBLE_MACHINE=%s' % (pkg, compatible_machine))
+    if pkg_board_restriction:
+        sys.exit(1)
+
+    ## Finally write the table
+    write_tabular(out_dir,
+                  'userspace-pkg.inc',
+                  ['Package', 'Board/SoC Family', 'Version'],
+                  [ [pkg, 'All', format_version(versions[0])] for pkg, versions in pkgs.items() ])
+
+
+def write_soc_pkg(data, out_dir):
+    socs = {'mx28': [],
+            'mx5': [],
+            'mx6sl': [],
+            'mx6dl': [],
+            'vf60': []}
+    pkgs = ['imx-test',
+            'gst-fsl-plugin',
+            'libfslcodec',
+            'libfslparser',
+            'imx-vpu',
+            'imx-lib',
+            'firmware-imx',
+            'mxsldr',
+            'gpu-viv-g2d',
+            'xf86-video-imxfb-vivante',
+            'gpu-viv-bin-mx6q',
+            'directfb',
+            'directfb-examples',
+            'xf86-video-imxfb',
+            'amd-gpu-bin-mx51',
+            'libz160',
+            'amd-gpu-x11-bin-mx51',
+            'libfslvpuwrap',
+            'fsl-alsa-plugins',
+            'gstreamer1.0-plugins-imx',
+            'imx-uuc',
+            'libmcc',
+            'mqxboot']
+    ## Fill the socs dictionary
+    for board, board_data in data.items():
+        soc_family = board_data['soc-family']
+        for soc in socs.keys():
+            if is_in_soc_family(soc, soc_family):
+                socs[soc].append(board)
+    ## Check that no board is assigned to more than one SoC
+    boards_socs = {}
+    board_in_multiple_socs = False
+    for soc, boards in socs.items():
+        for board in boards:
+            if board in boards_socs:
+                board_in_multiple_socs = True
+                error('Board %s has been found in both %s and %s SoCs' % (board, boards_socs[board], soc))
+            else:
+                boards_socs[board] = soc
+    if board_in_multiple_socs:
+        sys.exit(1)
+
+    ## Use the most frequent package versions among boards of the same
+    ## SoC, in case of different versions for the same package
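+    ## (e.g., if three mx6dl boards provide version 3.0.7 of a package
+    ## and one provides 3.0.5, 3.0.7 wins for the mx6dl column)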
+    pkgs_socs_versions = {}
+    for pkg in pkgs:
+        for soc, boards in socs.items():
+            if boards:
+                pkg_versions = {}
+                for board in boards:
+                    if pkg in data[board]['recipes'].keys():
+                        recipe = data[board]['recipes'][pkg]
+                        compatible_machine = recipe['compatible-machine']
+                        if compatible_machine is None:
+                            pkg_versions[board] = recipe['version']
+                        elif (compatible_machine and \
+                              is_compatible_machine(data[board]['soc-family'], compatible_machine)):
+                            pkg_versions[board] = recipe['version']
+                        else:
+                            ## The package is not for that board
+                            pkg_versions[board] = -1
+                    else:
+                        pkg_versions[board] = -1
+                versions = pkg_versions.values()
+                versions_histogram = {}
+                for version in versions:
+                    if version in versions_histogram:
+                        versions_histogram[version] += 1
+                    else:
+                        versions_histogram[version] = 1
+                versions_freq = versions_histogram.values()
+                most_freq = max(versions_freq)
+                ## More than one "most frequent" version?
+                if versions_freq.count(most_freq) > 1:
+                    error('The most frequent versions (%s) for %s are equally distributed among boards of SoC %s.  Cannot determine which one to use.' % \
+                              ([ ver for ver, count in versions_histogram.items() if count == most_freq ],
+                               pkg,
+                               soc))
+                    sys.exit(1)
+                else:
+                    pkg_version = None
+                    for version, count in versions_histogram.items():
+                        if count == most_freq:
+                            pkg_version = version
+                            break
+                    pkgs_socs_versions[(pkg, soc)] = pkg_version
+
+    ## Build up the table body
+    body = []
+    soc_names = filter(lambda soc: socs[soc], sorted(socs.keys()))
+    def replace_noversions(versions):
+        ## -1 is the marker for "package not available for this SoC"
+        new_versions = []
+        for v in versions:
+            if v == -1:
+                new_versions.append('--')
+            else:
+                new_versions.append(format_version(v))
+        return new_versions
+    for pkg in pkgs:
+        versions = [ pkgs_socs_versions[(pkg, soc)] for soc in soc_names ]
+        body.append([pkg] + replace_noversions(versions))
+
+    ## Finally write the table
+    write_tabular(out_dir,
+                  'soc-pkg.inc',
+                  ['Package name'] + [ 'mx6q / mx6dl' if soc == 'mx6dl' else soc for soc in soc_names ],
+                  body)
+
+
+def write_maintainers_tables(data, out_dir, bsp_dir):
+    meta_fsl_arm_machines_dir = os.path.join(bsp_dir, 'sources', 'meta-fsl-arm', 'conf', 'machine')
+    meta_fsl_arm_extra_machines_dir = os.path.join(bsp_dir, 'sources', 'meta-fsl-arm-extra', 'conf', 'machine')
+    get_maintainer_script = os.path.join(bsp_dir, 'sources', 'meta-fsl-arm', 'scripts', 'get-maintainer')
+    try:
+        get_maintainer_pipe = subprocess.Popen([get_maintainer_script,
+                                                '--dump',
+                                                meta_fsl_arm_machines_dir,
+                                                meta_fsl_arm_extra_machines_dir],
+                                               stdout=subprocess.PIPE)
+    except OSError:
+        error('Could not run the get-maintainer script (attempted %s)' % (get_maintainer_script,))
+        sys.exit(1)
+
+    get_maintainer_output, err = get_maintainer_pipe.communicate()
+    maintained = []
+    not_maintained = []
+    for line in get_maintainer_output.split('\n'):
+        if line == '':
+            continue
+        columns = line.split('\t')
+        len_cols = len(columns)
+        if len_cols == 2:
+            not_maintained.append(columns)
+        elif len_cols == 3:
+            maintained.append(columns[0:2])
+        else:
+            error('write_maintainers_tables: unexpected get-maintainer output format.')
+
+    ## Write the maintained boards file
+    write_tabular(out_dir,
+                  'machines-with-maintainers.inc',
+                  ['Machine', 'Name'],
+                  maintained)
+
+    ## Write the unmaintained boards file
+    write_tabular(out_dir,
+                  'machines-without-maintainers.inc',
+                  ['Machine', 'Name'],
+                  not_maintained)
+
+
+def write_machines_list(data, out_dir, bsp_dir):
+    output_machine_list_script = './output-machine-list'
+    try:
+        output_machine_list_pipe = subprocess.Popen([output_machine_list_script,
+                                                     bsp_dir,
+                                                     'tabularize'],
+                                                    stdout=subprocess.PIPE)
+    except OSError:
+        error('Could not run the output-machine-list script (attempted %s)' % (output_machine_list_script,))
+        sys.exit(1)
+
+    out, err = output_machine_list_pipe.communicate()
+    out_file = os.path.join(out_dir, 'machine-list.inc')
+    info('Writing %s' % out_file)
+    fd = open(out_file, 'w')
+    fd.write(out)
+    fd.close()
+
+
+def write_soc_tree(data, out_dir):
+    soc_families = []
+    for board, board_data in data.items():
+        soc_family = board_data['soc-family']
+        if soc_family not in soc_families:
+            soc_families.append(soc_family)
+
+    max_depth = 2
+    socs = [ soc_family.split(':')[0:max_depth]
+             for soc_family in soc_families ]
+
+    def indent(label, level, fd, last=False):
+        if level == 0:
+            padding = '  '
+        else:
+            padding = '  │'
+        if last:
+            corner = '└'
+        else:
+            corner = '├'
+        fd.write(padding + (' ' * 4 * level) + corner + '── ' + label + '\n')
+
+    def print_tree(tree, fd, level=0):
+        parents = sorted(tree.keys())
+        len_parents = len(parents)
+        for i, parent in enumerate(parents):
+            indent(parent, level, fd, i == len_parents -1)
+            children = tree[parent]
+            if children:
+                print_tree(children, fd, level + 1)
+
+    def dict_merge(a, b):
+        if not isinstance(b, dict):
+            return b
+        result = copy.deepcopy(a)
+        for k, v in b.iteritems():
+            if k in result and isinstance(result[k], dict):
+                result[k] = dict_merge(result[k], v)
+            else:
+                result[k] = copy.deepcopy(v)
+        return result
+
+    def socs2dict(socs):
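+        ## Build a nested dictionary (tree) out of lists of SoC family
+        ## components.  E.g., [['mx5', 'mx53'], ['mx6dl']] yields
+        ## {'mx5': {'mx53': {}}, 'mx6dl': {}}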
+        tree = {}
+        for branch in socs:
+            tmp = {}
+            reduce(lambda d, key: d.setdefault(key, {}), branch, tmp)
+            tree = dict_merge(tree, tmp)
+        return tree
+
+    out_file = os.path.join(out_dir, 'soc-tree.inc')
+    info('Writing %s' % out_file)
+    fd = open(out_file, 'w')
+    fd.write('.. code-block:: none\n\n')
+    fd.write('  SoCs\n')
+    print_tree(socs2dict(socs), fd)
+    fd.close()
+
+def write_acknowledgements(out_dir, bsp_dir, gitdm_dir, start_commit, end_commit):
+    meta_freescale_dir = os.path.join(gitdm_dir, 'meta-freescale')
+    gen_statistics_script = os.path.join(meta_freescale_dir, 'gen-statistics')
+    anchor = os.getcwd()
+    try:
+        os.chdir(meta_freescale_dir)
+        ## Discard the script's output; using subprocess.PIPE without
+        ## reading from it could deadlock if the pipe buffer fills up.
+        with open(os.devnull, 'w') as devnull:
+            subprocess.call([gen_statistics_script,
+                             bsp_dir,
+                             start_commit,
+                             end_commit],
+                            stdout=devnull)
+        os.chdir(anchor)
+    except OSError:
+        error('Could not run the gen-statistics script (attempted %s)' % (gen_statistics_script,))
+        sys.exit(1)
+
+    out_file = os.path.join(out_dir, 'ack-sourcers.inc')
+    info('Writing %s' % out_file)
+    shutil.copyfile(os.path.join(meta_freescale_dir, 'results.all.txt'),
+                    out_file)
+
+
+def usage(exit_code=None):
+    print 'Usage: %s <data file> <output dir> <bsp dir> <gitdm dir> <start commit> <end commit>' % (os.path.basename(sys.argv[0]),)
+    if exit_code is not None:
+        sys.exit(exit_code)
+
+
+if '-h' in sys.argv or '-help' in sys.argv or '--help' in sys.argv:
+    usage(0)
+
+if len(sys.argv) < 7:
+    usage(1)
+
+data_file = sys.argv[1]
+out_dir = sys.argv[2]
+bsp_dir = sys.argv[3]
+gitdm_dir = sys.argv[4]
+start_commit = sys.argv[5]
+end_commit = sys.argv[6]
+
+data_fd = open(data_file, 'r')
+data = pickle.load(data_fd)
+data_fd.close()
+
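+## `data' maps a machine name to that machine's metadata, as
+## collected by extract-bitbake-metadata.py:
+##
+##   {machine: {'image-bootloader': ...,
+##              'soc-family': ...,
+##              'recipes': {recipe: {'recipe': ..., 'version': ...,
+##                                   'file': ..., 'srcbranch': ...,
+##                                   'compatible-machine': ...,
+##                                   'description': ...}}}}
+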
+try:
+    os.mkdir(out_dir)
+except OSError:
+    if not os.path.isdir(out_dir):
+        sys.stderr.write('A file named %s already exists. Aborting.\n' % out_dir)
+        sys.exit(1)
+    else:
+        pass # if the directory already exists, it's ok
+
+write_linux_default(data, out_dir)
+write_fsl_community_bsp_supported_kernels(data, out_dir)
+write_fsl_community_bsp_supported_bootloaders_descr(data, out_dir)
+write_bootloader_default(data, out_dir)
+write_userspace_pkg(data, out_dir)
+write_soc_pkg(data, out_dir)
+write_maintainers_tables(data, out_dir, bsp_dir)
+write_machines_list(data, out_dir, bsp_dir)
+write_soc_tree(data, out_dir)
+write_acknowledgements(out_dir, bsp_dir, gitdm_dir, start_commit, end_commit)
diff --git a/scripts/bitbake-metadata2doc.sh b/scripts/bitbake-metadata2doc.sh
new file mode 100755
index 0000000..9e21ee0
--- /dev/null
+++ b/scripts/bitbake-metadata2doc.sh
@@ -0,0 +1,105 @@
+#! /bin/bash
+
+### This script generates documentation based on metadata extracted
+### out of BitBake's cache data.
+###
+### It basically sources setup-environment and runs
+### extract-bitbake-metadata.py for each given machine (if the
+### MACHINES environment variable is set, it is used; otherwise the
+### machine list is generated by output-machine-list).
+### extract-bitbake-metadata.py collects data from the BitBake cache
+### for each machine and writes a file (doc-data.pckl, in Python's
+### pickle format) which is eventually used by bitbake-metadata2doc.py
+### to transform all the collected data into documentation in rst
+### format.
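+###
+### Example (paths, machine list and tags are illustrative):
+###
+###   MACHINES="imx6qsabresd" ./bitbake-metadata2doc.sh \
+###       ~/src/fsl-community-bsp ~/src/gitdm 1.5 1.6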
+
+# Check if running from the scripts dir
+if [ "`basename $PWD`" != "scripts" ]; then
+    echo "This script is expected to be run from the scripts directory" >&2
+    exit 1
+fi
+
+usage() {
+    local exit_code
+    local output
+    [ -n "$1" ] && exit_code=$1
+    if [ -n "$exit_code" ] && [ "$exit_code" != "0" ]; then
+        output=2
+    else
+        output=1
+    fi
+    echo "Usage: `basename $0` <yocto directory> <gitdm directory> <start commit> <end commit>" >&$output
+    [ -n "$exit_code" ] && exit $exit_code
+}
+
+
+[ -z "$4" ] && usage 1
+
+if [ "$1" = "-h" ] || [ "$1" = "-help" ] || [ "$1" = "--help" ]; then
+    usage 0
+fi
+
+yocto_dir="$1"
+gitdm_dir="$2"
+start_commit="$3"
+end_commit="$4"
+anchor="`pwd`"
+
+machines=
+if [ -n "$MACHINES" ]; then
+    machines="$MACHINES"
+else
+    machines=`./output-machine-list "$yocto_dir"`
+fi
+
+marshalled_data_file=doc-data.pckl
+
+rm -f "$anchor/$marshalled_data_file"
+
+for machine in $machines; do
+    cd "$yocto_dir"
+    MACHINE=$machine . ./setup-environment build
+
+    MACHINE=$machine python $anchor/extract-bitbake-metadata.py \
+        $anchor/$marshalled_data_file \
+        amd-gpu-bin-mx51 \
+        amd-gpu-x11-bin-mx51 \
+        barebox \
+        directfb \
+        directfb-examples \
+        libdrm \
+        firmware-imx \
+        fsl-alsa-plugins \
+        gpu-viv-bin-mx6q \
+        gpu-viv-g2d \
+        gst-fsl-plugin \
+        gstreamer \
+        gstreamer1.0-plugins-imx \
+        imx-kobs \
+        imx-lib \
+        imx-test \
+        imx-uuc \
+        imx-vpu \
+        libfslcodec \
+        libfslparser \
+        libfslvpuwrap \
+        libmcc \
+        libz160 \
+        mqxboot \
+        mxsldr \
+        virtual/kernel \
+        virtual/bootloader \
+        udev \
+        u-boot \
+        xserver-xorg \
+        xf86-dri-vivante \
+        xf86-video-imxfb \
+        xf86-video-imxfb-vivante
+    ret=$?
+    if [ "$ret" != "0" ]; then
+        echo "ERROR: error extracting bitbake metadata for board $MACHINE"
+        exit 1
+    fi
+done
+
+cd $anchor
+python ./bitbake-metadata2doc.py $marshalled_data_file "../release-notes/source" "$yocto_dir" "$gitdm_dir" "$start_commit" "$end_commit"
diff --git a/scripts/extract-bitbake-metadata.py b/scripts/extract-bitbake-metadata.py
new file mode 100644
index 0000000..695e8a2
--- /dev/null
+++ b/scripts/extract-bitbake-metadata.py
@@ -0,0 +1,329 @@
+#!/usr/bin/env python
+
+""" From https://github.com/kergoth/bb/blob/master/libexec/bbcmd.py """
+
+import argparse
+import contextlib
+import logging
+import os
+import sys
+import warnings
+
+PATH = os.getenv('PATH').split(':')
+bitbake_paths = [os.path.join(path, '..', 'lib')
+                 for path in PATH if os.path.exists(os.path.join(path, 'bitbake'))]
+if not bitbake_paths:
+    raise ImportError("Unable to locate bitbake, please ensure PATH is set correctly.")
+
+sys.path[0:0] = bitbake_paths
+
+import bb.msg
+import bb.utils
+import bb.providers
+import bb.tinfoil
+from bb.cookerdata import CookerConfiguration, ConfigParameters
+
+
+class Terminate(BaseException):
+    pass
+
+
+class Tinfoil(bb.tinfoil.Tinfoil):
+    def __init__(self, output=sys.stdout):
+        # Needed to avoid deprecation warnings with python 2.6
+        warnings.filterwarnings("ignore", category=DeprecationWarning)
+
+        # Set up logging
+        self.logger = logging.getLogger('BitBake')
+        if output is not None:
+            setup_log_handler(self.logger, output)
+
+        self.config = CookerConfiguration()
+        configparams = bb.tinfoil.TinfoilConfigParameters(parse_only=True)
+        self.config.setConfigParameters(configparams)
+        self.config.setServerRegIdleCallback(self.register_idle_function)
+        self.cooker = bb.cooker.BBCooker(self.config)
+        self.config_data = self.cooker.data
+        bb.providers.logger.setLevel(logging.ERROR)
+        bb.taskdata.logger.setLevel(logging.CRITICAL)
+        self.cooker_data = None
+        self.taskdata = None
+
+        self.localdata = bb.data.createCopy(self.config_data)
+        self.localdata.finalize()
+        # TODO: why isn't expandKeys a method of DataSmart?
+        bb.data.expandKeys(self.localdata)
+
+
+    def prepare_taskdata(self, provided=None, rprovided=None):
+        self.cache_data = self.cooker.recipecache
+        if self.taskdata is None:
+            self.taskdata = bb.taskdata.TaskData(abort=False)
+
+        if provided:
+            self.add_provided(provided)
+
+        if rprovided:
+            self.add_rprovided(rprovided)
+
+    def add_rprovided(self, rprovided):
+        for item in rprovided:
+            self.taskdata.add_rprovider(self.localdata, self.cache_data, item)
+
+        self.taskdata.add_unresolved(self.localdata, self.cache_data)
+
+    def add_provided(self, provided):
+        if 'world' in provided:
+            if not self.cache_data.world_target:
+                self.cooker.buildWorldTargetList()
+            provided.remove('world')
+            provided.extend(self.cache_data.world_target)
+
+        if 'universe' in provided:
+            provided.remove('universe')
+            provided.extend(self.cache_data.universe_target)
+
+        for item in provided:
+            self.taskdata.add_provider(self.localdata, self.cache_data, item)
+
+        self.taskdata.add_unresolved(self.localdata, self.cache_data)
+
+    def rec_get_dependees(self, targetid, depth=0, seen=None):
+        if seen is None:
+            seen = set()
+
+        for dependee_fnid, dependee_id in self.get_dependees(targetid, seen):
+            yield dependee_id, depth
+
+            for _id, _depth in self.rec_get_dependees(dependee_id, depth+1, seen):
+                yield _id, _depth
+
+    def get_dependees(self, targetid, seen):
+        dep_fnids = self.taskdata.get_dependees(targetid)
+        for dep_fnid in dep_fnids:
+            if dep_fnid in seen:
+                continue
+            seen.add(dep_fnid)
+            for target in self.taskdata.build_targets:
+                if dep_fnid in self.taskdata.build_targets[target]:
+                    yield dep_fnid, target
+
+    def get_buildid(self, target):
+        if not self.taskdata.have_build_target(target):
+            if target in self.cooker.recipecache.ignored_dependencies:
+                return
+
+            reasons = self.taskdata.get_reasons(target)
+            if reasons:
+                self.logger.error("No buildable '%s' recipe found:\n%s", target, "\n".join(reasons))
+            else:
+                self.logger.error("No '%s' recipe found", target)
+            return
+        else:
+            return self.taskdata.getbuild_id(target)
+
+    def target_filenames(self):
+        """Return the filenames of all of taskdata's targets"""
+        filenames = set()
+
+        for targetid in self.taskdata.build_targets:
+            fnid = self.taskdata.build_targets[targetid][0]
+            fn = self.taskdata.fn_index[fnid]
+            filenames.add(fn)
+
+        for targetid in self.taskdata.run_targets:
+            fnid = self.taskdata.run_targets[targetid][0]
+            fn = self.taskdata.fn_index[fnid]
+            filenames.add(fn)
+
+        return filenames
+
+    def all_filenames(self):
+        return self.cooker.recipecache.file_checksums.keys()
+
+    def all_preferred_filenames(self):
+        """Return all the recipes we have cached, filtered by providers.
+
+        Unlike target_filenames, this doesn't operate against taskdata.
+        """
+        filenames = set()
+        excluded = set()
+        for provide, fns in self.cooker.recipecache.providers.iteritems():
+            eligible, foundUnique = bb.providers.filterProviders(fns, provide,
+                                                                 self.localdata,
+                                                                 self.cooker.recipecache)
+            preferred = eligible[0]
+            if len(fns) > 1:
+                # Excluding non-preferred providers in multiple-provider
+                # situations.
+                for fn in fns:
+                    if fn != preferred:
+                        excluded.add(fn)
+            filenames.add(preferred)
+
+        filenames -= excluded
+        return filenames
+
+    def provide_to_fn(self, provide):
+        """Return the preferred recipe for the specified provide"""
+        filenames = self.cooker.recipecache.providers[provide]
+        eligible, foundUnique = bb.providers.filterProviders(filenames, provide,
+                                                             self.localdata,
+                                                             self.cooker.recipecache)
+        return eligible[0]
+
+    def build_target_to_fn(self, target):
+        """Given a target, prepare taskdata and return a filename"""
+        self.prepare_taskdata([target])
+        targetid = self.get_buildid(target)
+        if targetid is None:
+            return
+        fnid = self.taskdata.build_targets[targetid][0]
+        fn = self.taskdata.fn_index[fnid]
+        return fn
+
+    def parse_recipe_file(self, recipe_filename):
+        """Given a recipe filename, do a full parse of it"""
+        appends = self.cooker.collection.get_file_appends(recipe_filename)
+        recipe_data = bb.cache.Cache.loadDataFull(recipe_filename,
+                                                  appends,
+                                                  self.config_data)
+        return recipe_data
+
+    def parse_metadata(self, recipe=None):
+        """Return metadata, either global or for a particular recipe"""
+        if recipe:
+            self.prepare_taskdata([recipe])
+            filename = self.build_target_to_fn(recipe)
+            return self.parse_recipe_file(filename)
+        else:
+            return self.localdata
+
+
+class CompleteParser(argparse.ArgumentParser):
+    """Argument parser which handles '--complete' for completions"""
+    def __init__(self, *args, **kwargs):
+        self.complete_parser = argparse.ArgumentParser(add_help=False)
+        self.complete_parser.add_argument('--complete', action='store_true')
+        super(CompleteParser, self).__init__(*args, **kwargs)
+
+    def parse_args(self, args=None, namespace=None):
+        parsed, remaining = self.complete_parser.parse_known_args(args)
+        if parsed.complete:
+            for action in self._actions:
+                for string in action.option_strings:
+                    print(string)
+        else:
+            return super(CompleteParser, self).parse_args(remaining, namespace)
+
+
+def iter_uniq(iterable):
+    """Yield unique elements of an iterable"""
+    seen = set()
+    for i in iterable:
+        if i not in seen:
+            seen.add(i)
+            yield i
+
+
+@contextlib.contextmanager
+def status(message, outfile=sys.stderr):
+    """Show the user what we're doing, and whether we succeed"""
+    outfile.write('{0}..'.format(message))
+    outfile.flush()
+    try:
+        yield
+    except KeyboardInterrupt:
+        outfile.write('.interrupted\n')
+        raise
+    except Terminate:
+        outfile.write('.terminated\n')
+        raise
+    except BaseException:
+        outfile.write('.failed\n')
+        raise
+    outfile.write('.done\n')
+
+
+def setup_log_handler(logger, output=sys.stderr):
+    log_format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
+    if output.isatty() and hasattr(log_format, 'enable_color'):
+        log_format.enable_color()
+    handler = logging.StreamHandler(output)
+    handler.setFormatter(log_format)
+
+    bb.msg.addDefaultlogFilter(handler)
+    logger.addHandler(handler)
+    logger.setLevel(logging.INFO)
+
+
+def sigterm_exception(signum, stackframe):
+    raise Terminate()
+
+###### end of bbcmd
+
+import pickle
+
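+## This script is meant to be run once per machine (see
+## bitbake-metadata2doc.sh): each run loads the data file written by
+## previous runs, adds the current machine's data to it and writes it
+## back.
+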
+def load_data(data_file):
+    try:
+        fd = open(data_file, 'r')
+        data = pickle.load(fd)
+        fd.close()
+        return data
+    except Exception:
+        ## Most likely the data file does not exist yet (first run);
+        ## start with an empty dictionary.
+        return {}
+
+def dump_data(data, data_file):
+    fd = open(data_file, 'w')
+    pickle.dump(data, fd)
+    fd.close()
+
+def extract_bitbake_metadata(recipes):
+    # tinfoil sets up log output for the bitbake loggers, but bb uses
+    # a separate namespace at this time
+    setup_log_handler(logging.getLogger('bb'))
+
+    tinfoil = Tinfoil(output=sys.stderr)
+    tinfoil.prepare(config_only=True)
+
+    tinfoil.parseRecipes()
+
+    data = {}
+
+    metadata = tinfoil.parse_metadata()
+    machine = metadata.getVar('MACHINE', True)
+    data['image-bootloader'] = metadata.getVar('IMAGE_BOOTLOADER', True)
+    data['soc-family'] = metadata.getVar('SOC_FAMILY', True)
+    data['recipes'] = {}
+
+    metadata = None
+    for recipe in recipes:
+        try:
+            metadata = tinfoil.parse_metadata(recipe)
+        except Exception:
+            ## Not all recipes are available for all machines; skip
+            ## the ones that cannot be parsed.
+            continue
+
+        data['recipes'][recipe] = {}
+        data['recipes'][recipe]['recipe'] = metadata.getVar('PN', True)
+        data['recipes'][recipe]['version'] = metadata.getVar('PV', True)
+        data['recipes'][recipe]['file'] = tinfoil.build_target_to_fn(recipe)
+        data['recipes'][recipe]['srcbranch'] = metadata.getVar('SRCBRANCH', True)
+        data['recipes'][recipe]['compatible-machine'] = metadata.getVar('COMPATIBLE_MACHINE', True)
+
+        description = metadata.getVar('DESCRIPTION', True)
+        if not description:
+            description = metadata.getVar('SUMMARY', True)
+        data['recipes'][recipe]['description'] = description
+
+    return {machine: data}
+
+logger = logging.getLogger('bb.dump')
+
+data_file = sys.argv[1]
+user_recipes = sys.argv[2:]
+
+data = load_data(data_file)
+data.update(extract_bitbake_metadata(user_recipes))
+
+dump_data(data, data_file)
-- 
1.7.10.4


