#!/usr/bin/python3
# -*- coding: utf-8 -*-
# Generate CVE OVAL from CVE metadata files
#
# Author: David Ries <ries@jovalcm.com>
# Author: Joy Latten <joy.latten@canonical.com>
# Author: Steve Beattie <steve.beattie@canonical.com>
# Copyright (C) 2015 Farnam Hall Ventures LLC
# Copyright (C) 2019 Canonical Ltd.
#
# This script is distributed under the terms and conditions of the GNU General
# Public License, Version 2 or later. See http://www.gnu.org/copyleft/gpl.html
# for details.
#
# Example usage:
# $ sudo apt-get install libopenscap8
# $ oscap info ./com.ubuntu.trusty.cve.oval.xml
# $ oscap oval generate report ./com.ubuntu.trusty.cve.oval.xml
#
# Requires the OVAL 5.11.1 schemas in /usr/share/openscap/schemas/oval/ as
# well as openscap support for dpkg version comparisons. Both will hopefully
# be part of openscap 1.3:
# $ oscap oval eval --report /tmp/oval-report.html \
#     ./com.ubuntu.trusty.cve.oval.xml

from __future__ import print_function, unicode_literals

import argparse
import functools
import glob
import json
import os
import re
import sys
import tempfile
#from launchpadlib.launchpad import Launchpad

import apt_pkg
from cve_lib import (kernel_srcs, get_orig_rel_name, load_cve,
                     get_subproject_details, PRODUCT_UBUNTU, all_releases,
                     eol_releases, devel_release, release_parent,
                     release_name, release_ppa, release_progenitor)
from kernel_lib import (meta_kernels, kernel_package_abi, kernel_package_version)
import oval_lib
import lpl_common

# cope with apt_pkg api changes.
if 'init_system' in dir(apt_pkg):
    apt_pkg.init_system()
else:
    apt_pkg.InitSystem()

supported_products = [PRODUCT_UBUNTU, 'esm', 'esm-infra', 'esm-apps', 'fips', 'fips-updates']
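# Build the list of releases to generate OVAL for: every non-EOL,
# non-devel release whose product is supported, plus any parent release
# (e.g. an ESM series also pulls in the Ubuntu release it is based on).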
supported_releases = []
for r in set(all_releases).difference(set(eol_releases)).difference(set([devel_release])):
    _, product, _, _ = get_subproject_details(r)
    if product in supported_products:
        if r not in supported_releases:
            supported_releases.append(r)
        parent = release_parent(r)
        if parent and parent not in supported_releases:
            supported_releases.append(parent)

default_cves_to_process = ['active/CVE-*', 'retired/CVE-*']

packages_to_ignore = ("-dev", "-doc", "-dbg", "-dbgsym", "-udeb", "-locale-")

debug_level = 0


def main():
    """ parse command line options and iterate through files to be processed
    """
    global debug_level

    # parse command line options
    parser = argparse.ArgumentParser(description='Generate CVE OVAL from ' +
                                     'CVE metadata files.')
    parser.add_argument('pathname', nargs='*',
                        help='pathname patterns (globs) specifying CVE ' +
                             'metadata files to be converted into OVAL ' +
                             '(default: "./active/CVE-*" "./retired/CVE-*")')
    parser.add_argument('--oci', action='store_true',
                        help='Also generate OVAL files for scanning Official Cloud Image manifests')
    parser.add_argument('--output-dir', nargs='?', default='./',
                        help='output directory for reports (default is ./)')
    parser.add_argument('--oci-output-dir', nargs='?',
                        help='output directory for OCI manifest OVAL files (default is to use the same directory as --output-dir)')
    parser.add_argument('--oci-prefix', nargs='?', default='oci.',
                        help='Prefix to use for OCI manifest OVAL file names (required if oci-output-dir is the same as output-dir)')
    parser.add_argument('--cve-prefix-dir', nargs='?', default='./',
                        help='location of CVE metadata files to process ' +
                        '(default is ./)')
    parser.add_argument('--no-progress', action='store_true',
                        help='do not show progress meter')
    parser.add_argument('--pkg-cache', action='store', default="pkg_cache.json",
                        help='cache location for binary packages')
    parser.add_argument('--force-cache-reload', action='store_true',
                        help='force reload of cache file')
    parser.add_argument('-d', '--debug', action='count', default=0,
                        help="report debugging information")
    parser.add_argument('--usn-oval', action='store_true',
                        help='generates oval from the USN database')
    parser.add_argument('--usn-db-dir', default='./', type=str,
                        help='location of USN database.json to process ' +
                        '(default is ./)')
    parser.add_argument('--usn-number', default=None, type=str,
                        help='if passed, specifies a USN for the oval_usn generator')
    parser.add_argument('--usn-oval-release', default=None, type=str,
                        help='specifies a release for which to generate USN OVAL')
    parser.add_argument('--packages', nargs='+', action='store', default=None,
                        help='generates oval for specific packages. Only ' +
                        'for CVE OVAL')

    args = parser.parse_args()
    pathnames = args.pathname or default_cves_to_process
    debug_level = args.debug

    # debugging; caution, can expose credentials
    if debug_level >= 2:
        import httplib2
        httplib2.debuglevel = 1

    # create oval generators for each supported release
    outdir = './'
    if args.output_dir:
        outdir = args.output_dir
        if not os.path.isdir(outdir):
            raise FileNotFoundError("Could not find '%s'" % outdir)
    if args.oci:
        if args.oci_output_dir:
            ocioutdir = args.oci_output_dir
        else:
            ocioutdir = args.output_dir
        if not os.path.isdir(ocioutdir):
            raise FileNotFoundError("Could not find '%s'" % ocioutdir)
        ociprefix = args.oci_prefix
        if outdir == ocioutdir and len(ociprefix) < 1:
            raise ValueError("oci-prefix must be set when output-dir and oci-output-dir are the same")

    if args.usn_oval:
        if args.oci:
            generate_oval_usn(args.output_dir, args.usn_number, args.usn_oval_release,
                              args.cve_prefix_dir, args.usn_db_dir, ociprefix, ocioutdir)
        else:
            generate_oval_usn(args.output_dir, args.usn_number, args.usn_oval_release,
                              args.cve_prefix_dir, args.usn_db_dir)

        return

    ovals = dict()
    for i in supported_releases:
        # we can have nested parent releases
        parent = release_progenitor(i)
        index = '{0}_dpkg'.format(i)
        ovals[index] = oval_lib.OvalGenerator(i, release_name(i), parent, warn, outdir, prefix='', oval_format='dpkg')
        ovals[index].add_release_applicability_definition()
        if args.oci:
            index = '{0}_oci'.format(i)
            ovals[index] = oval_lib.OvalGenerator(i, release_name(i), parent, warn, ocioutdir, prefix=ociprefix, oval_format='oci')
            ovals[index].add_release_applicability_definition()

    # set up cachefile
    cache = PackageCache(args.pkg_cache, args.force_cache_reload)

    # loop through all CVE data files
    files = []
    for pathname in pathnames:
        files = files + glob.glob(os.path.join(args.cve_prefix_dir, pathname))
    files.sort()

    pkg_filter = None
    if args.packages:
        pkg_filter = args.packages

    files_count = len(files)
    for i_file, filepath in enumerate(files):
        cve_data = parse_cve_file(filepath, cache, pkg_filter)

        # skip CVEs without packages for supported releases
        if not cve_data['packages']:
            if not args.no_progress:
                progress_bar(i_file + 1, files_count)
            continue

        for i in ovals:
            ovals[i].generate_cve_definition(cve_data)

        if not args.no_progress:
            progress_bar(i_file + 1, files_count)

    for i in ovals:
        ovals[i].write_to_file()

    cache.write_cache()


def parse_package_status(release, package, status_text, filepath, cache):
    """ parse ubuntu package status string format:
          &lt;status code&gt; (&lt;version/notes&gt;)
        outputs dictionary: {
          'status'        : '&lt;not-applicable | unknown | vulnerable | fixed&gt;',
          'note'          : '&lt;description of the status&gt;',
          'fix-version'   : '&lt;version with issue fixed, if applicable&gt;',
          'bin-pkgs'      : []
        } """

    # break out status code and detail
    status_sections = status_text.strip().split(' ', 1)
    code = status_sections[0].strip().lower()
    detail = status_sections[1].strip('()') if len(status_sections) > 1 else None

    status = {}
    note_end = " (note: '{0}').".format(detail) if detail else '.'
    if code != 'dne':
        if detail and detail.isdigit() and code in ['released', 'not-affected']:
            status['bin-pkgs'] = cache.get_binarypkgs(package, release, version=detail)
        else:
            status['bin-pkgs'] = cache.get_binarypkgs(package, release)

    if code == 'dne':
        status['status'] = 'not-applicable'
        status['note'] = \
            " package does not exist in {0}{1}".format(release, note_end)
    elif code == 'ignored':
        status['status'] = 'vulnerable'
        status['note'] = ": while related to the CVE in some way, a decision has been made to ignore this issue{0}".format(note_end)
    elif code == 'not-affected':
        # check if there is a release version and if so, test for
        # package existence with that version
        if detail and detail[0].isdigit():
            status['status'] = 'fixed'
            status['note'] = " package in {0}, is related to the CVE in some way and has been fixed{1}".format(release, note_end)
            status['fix-version'] = detail
        else:
            status['status'] = 'not-vulnerable'
            status['note'] = " package in {0}, while related to the CVE in some way, is not affected{1}".format(release, note_end)
    elif code == 'needed':
        status['status'] = 'vulnerable'
        status['note'] = \
            " package in {0} is affected and needs fixing{1}".format(release, note_end)
    elif code == 'pending':
        # pending means that packages have been prepared and are in
        # -proposed or in a ppa somewhere, and should have a version
        # attached. If there is a version, test for package existence
        # with that version, otherwise mark as vulnerable
        if detail and detail[0].isdigit():
            status['status'] = 'fixed'
            status['note'] = " package in {0} is affected. An update containing the fix has been completed and is pending publication{1}".format(release, note_end)
            status['fix-version'] = detail
        else:
            status['status'] = 'vulnerable'
            status['note'] = " package in {0} is affected. An update containing the fix has been completed and is pending publication{1}".format(release, note_end)
    elif code == 'deferred':
        status['status'] = 'vulnerable'
        status['note'] = " package in {0} is affected, but a decision has been made to defer addressing it{1}".format(release, note_end)
    elif code in ['released']:
        # if there isn't a release version, then just mark
        # as vulnerable to test for package existence
        if not detail:
            status['status'] = 'vulnerable'
            status['note'] = " package in {0} was vulnerable and has been fixed, but no release version available for it{1}".format(release, note_end)
        else:
            status['status'] = 'fixed'
            status['note'] = " package in {0} was vulnerable but has been fixed{1}".format(release, note_end)
            status['fix-version'] = detail
    elif code == 'needs-triage':
        status['status'] = 'vulnerable'
        status['note'] = " package in {0} is affected and may need fixing{1}".format(release, note_end)
    else:
        warn('Unsupported status "{0}" in {1}_{2} in "{3}". Setting to "unknown".'.format(code, release, package, filepath))
        status['status'] = 'unknown'
        status['note'] = " package in {0} has a vulnerability that is not known (status: '{1}'). It is pending evaluation{2}".format(release, code, note_end)

    return status


# given a status generated by parse_package_status(), duplicate it for a
# different source package, computing binary packages for the new source
# package
def duplicate_package_status(release, package, original_status, cache, override_version=None):
    copied_status = {}
    copied_status['status'] = original_status['status']
    copied_status['note'] = original_status['note']
    if override_version:
        copied_status['fix-version'] = override_version
    elif 'fix-version' in original_status:
        copied_status['fix-version'] = original_status['fix-version']

    if 'fix-version' in copied_status and copied_status['fix-version'].isdigit():
        copied_status['bin-pkgs'] = cache.get_binarypkgs(package, release, version=copied_status['fix-version'])
    else:
        copied_status['bin-pkgs'] = cache.get_binarypkgs(package, release)

    return copied_status


# returns True if we should ignore this source package; primarily used
# for -edge kernels
def ignore_source_package(source):
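    # e.g. 'linux-aws-edge' matches and is ignored; 'linux-aws' is not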
    if re.match('linux-.*-edge$', source):
        return True
    return False


def parse_cve_file(filepath, cache, pkg_filter=None):
    """ parse CVE data file into a dictionary using cve_lib """

    cve_header_data = {
        'Candidate': '',
        'CRD': '',
        'PublicDate': '',
        'PublicDateAtUSN': '',
        'References': [get_cve_url(filepath)],
        'Description': '',
        'Ubuntu-Description': '',
        'Notes': '',
        'Mitigation': '',
        'Bugs': [],
        'Priority': '',
        'Discovered-by': '',
        'Assigned-to': '',
        'CVSS': '',
        'Unknown-Fields': [],
        'Source-note': filepath
    }

    data = load_cve(filepath)
    # first try a naive translation of fields
    for f in cve_header_data:
        try:
            cve_header_data[f] = data[f]
        except KeyError:
            pass

    # then handle any particular fields which are expected to have a
    # different format
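    # (e.g. Notes arrives from load_cve() as (user, text) tuples and is
    # flattened to "user> text"; CVSS entries become "source: vector")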
    cve_header_data['Description'] = cve_header_data['Description'].strip().replace('\n', ' ')
    cve_header_data['Ubuntu-Description'] = cve_header_data['Ubuntu-Description'].strip().replace('\n', ' ')
    cve_header_data['References'] = [ref.strip() for ref in cve_header_data['References'].split('\n') if len(ref.strip()) > 0]
    cve_header_data['References'].insert(0, get_cve_url(filepath))
    cve_header_data['Bugs'] = [bug.strip() for bug in cve_header_data['Bugs'].split('\n') if len(bug.strip()) > 0]
    cve_header_data['Notes'] = ' '.join(user + '> ' + note.replace('\n', ' ') for user, note in cve_header_data['Notes'])
    cve_header_data['CVSS'] = ' '.join(cvss['source'] + ': ' + cvss['vector'] for cvss in cve_header_data['CVSS'])

    packages = {}
    for pkg in data['pkgs']:
        if ignore_source_package(pkg):
            continue
        if pkg_filter:
            if pkg not in pkg_filter:
                continue

        packages[pkg] = {'Releases': {},
                         'Priority': '',
                         'Tags': []}
        for rel in data['pkgs'][pkg]:
            if rel in ['upstream', 'devel']:
                continue
            if rel not in supported_releases:
                continue
            try:
                _, product, _, _ = get_subproject_details(rel)
                if product not in supported_products:
                    continue
            except KeyError:
                continue
            state, details = data['pkgs'][pkg][rel]
            status_line = state
            if len(details) > 0:
                status_line += ' (' + details + ')'
            packages[pkg]['Releases'][rel] = parse_package_status(rel, pkg, status_line, filepath, cache)

    # add supplemental packages; currently only kernels need this special case.
    for package in [name for name in packages if name in kernel_srcs]:
        for release in [
            rel for rel in packages[package]['Releases']
            if packages[package]['Releases'][rel]['status'] != 'not-applicable'
        ]:
            # add meta package
            meta_pkg = meta_kernels.get_meta(release, package, quiet=(debug_level < 1))
            if meta_pkg:
                if meta_pkg not in packages:
                    packages[meta_pkg] = {
                        'Priority': packages[package]['Priority'],
                        'Tags': packages[package]['Tags'],
                        'Releases': {}
                    }
                if release not in packages[meta_pkg]['Releases']:
                    kernel_status = packages[package]['Releases'][release]
                    # kernel meta packages have a different versioning
                    # scheme derived from the kernel version + kernel abi
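                    # (illustrative, assuming the kernel_lib helpers split a
                    # fix-version like '5.4.0-42.46' into version '5.4.0' and
                    # ABI '42', the meta version would be '5.4.0.42')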
                    meta_version = None
                    if 'fix-version' in kernel_status:
                        meta_version = '%s.%s' % (kernel_package_version(kernel_status['fix-version']),
                                                  kernel_package_abi(kernel_status['fix-version']))
                    packages[meta_pkg]['Releases'][release] = \
                        duplicate_package_status(release, meta_pkg, kernel_status, cache, override_version=meta_version)
            # add signed package
            signed_pkg = meta_kernels.get_signed(release, package, quiet=(debug_level < 1))
            if signed_pkg:
                if signed_pkg not in packages:
                    packages[signed_pkg] = {
                        'Priority': packages[package]['Priority'],
                        'Tags': packages[package]['Tags'],
                        'Releases': {}
                    }
                if release not in packages[signed_pkg]['Releases']:
                    packages[signed_pkg]['Releases'][release] = \
                        duplicate_package_status(release, signed_pkg, packages[package]['Releases'][release], cache)

    # handle subprojects that are based on an Ubuntu release
    for pkg in packages:
        # if subproject is DNE for a given package, then copy parent's status
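        # (e.g. an ESM release whose own entry is 'dne' inherits the
        # status of the Ubuntu release it is derived from)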
        for rel in packages[pkg]['Releases']:
            parent = release_parent(rel)
            if parent and parent in packages[pkg]['Releases']:
                if packages[pkg]['Releases'][parent]['status'] != 'not-applicable' and packages[pkg]['Releases'][rel]['status'] == 'not-applicable':
                    packages[pkg]['Releases'][rel] = \
                        duplicate_package_status(parent, pkg, packages[pkg]['Releases'][parent], cache)
        # if supported release not in CVE file, then copy parent's status
        for rel in supported_releases:
            if rel not in packages[pkg]['Releases']:
                parent = release_parent(rel)
                if parent and parent in packages[pkg]['Releases']:
                    packages[pkg]['Releases'][rel] = \
                        duplicate_package_status(parent, pkg, packages[pkg]['Releases'][parent], cache)

    return {'header': cve_header_data, 'packages': packages}


def get_cve_url(filepath):
    """ returns a url to CVE data from a filepath """
    path = os.path.realpath(filepath).split(os.sep)
    url = "https://ubuntu.com/security"
    cve = path[-1]
    return "%s/%s" % (url, cve)


def warn(message):
    """ print a warning message """
    sys.stdout.write('\rWARNING: {0}\n'.format(message))

def error(message):
    """ print a error message """
    sys.stderr.write('\rERROR: {0}\n'.format(message))
    sys.exit(1)

def debug(message):
    """ print a debuging message """
    if debug_level &gt; 0:
        sys.stdout.write('\rDEBUG: {0}\n'.format(message))


def progress_bar(current, total, size=20):
    """ show a simple progress bar on the CLI """
    current_percent = float(current) / total
    hashes = '#' * int(round(current_percent * size))
    spaces = ' ' * (size - len(hashes))
    sys.stdout.write('\rProgress: [{0}] {1}% ({2} of {3} CVEs processed)'.format(hashes + spaces, int(round(current_percent * 100)), current, total))
    if current == total:
        sys.stdout.write('\n')

    sys.stdout.flush()


# Class to contain the binary package cache
class PackageCache():

    def __init__(self, cachefile='data_file.json', force_reload=False):
        self.cachefile = cachefile
        self.force_reload = force_reload
        # per-instance state (avoids shared mutable class attributes)
        self.cache_updates = 0
        self.releases = dict()
        self.unpublished_sources = dict()

        # open the local cache if it exists
        if os.path.exists(self.cachefile):
            debug('Opening and reading cache file %s' % self.cachefile)
            with open(self.cachefile, "r") as read_file:
                self.pkgcache = json.load(read_file)
        else:
            self.pkgcache = dict()
            self.force_reload = True

        # Get launchpad handlers...
        debug('Setting up launchpad connection...')
        self.lp = lpl_common.connect(version='devel')
        #lp = Launchpad.login_anonymously("generate-oval", "production", version='devel')
        self.ubuntu = self.lp.distributions['ubuntu']
        self.archive = self.ubuntu.main_archive

        # if debugging is enabled, flush cache every update
        self.cache_write_frequency = 1 if debug_level > 0 else 100

    def write_cache(self):
        debug('Writing cache file %s' % self.cachefile)
        if self.cachefile:
            # create as separate file and try to do atomic rename so
            # multiple writers don't corrupt cache.
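            # (the temporary file is created in the same directory as the
            # cache so that os.rename() remains an atomic, same-filesystem
            # operation)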
            with tempfile.NamedTemporaryFile(mode='w', prefix='oval_pkg_cache-', suffix='.new', dir=os.path.dirname(self.cachefile), delete=False) as write_file:
                new_cachefile = write_file.name
                json.dump(self.pkgcache, write_file, indent=2)

            os.rename(new_cachefile, self.cachefile)

    def _has_no_published_source(self, package, release):
        return (package in self.unpublished_sources
                and release in self.unpublished_sources[package])

    def _add_no_published_source(self, package, release):
        if package not in self.unpublished_sources:
            self.unpublished_sources[package] = [release]
        else:
            self.unpublished_sources[package].append(release)

    # lookup source package in launchpad, get latest version
    def _lookup_latest_source_package(self, source_name, release):

        # cache lp release info
        if release not in self.releases:
            # we can have nested parent releases
            parent = release_parent(release)
            while release_parent(parent):
                parent = release_parent(parent)
            if parent:
                self.releases[release] = self.ubuntu.getSeries(name_or_version=parent)
            else:
                self.releases[release] = self.ubuntu.getSeries(name_or_version=get_orig_rel_name(release))

        ppa = release_ppa(release)
        if ppa:
            archive, group, ppa_full_name = lpl_common.get_archive(ppa, self.lp, False, distribution=self.ubuntu)
            sources = archive.getPublishedSources(exact_match=True, source_name=source_name, distro_series=self.releases[release], status='Published')
            if len(sources) == 0:
                # if release is subproject and no version of package was released to it
                # then copy source from parent release
                sources = self.archive.getPublishedSources(exact_match=True, source_name=source_name, distro_series=self.releases[release], status='Published')
        else:
            sources = self.archive.getPublishedSources(exact_match=True, source_name=source_name, distro_series=self.releases[release], status='Published')

        # some kernels get statuses even when not published in the
        # archive yet
        if len(sources) == 0:
            self._add_no_published_source(source_name, release)
            return None

        # in python3, filter returns an iterable object, so wrap in list()
        sources = list(filter(lambda x: x.pocket in ['Release', 'Security', 'Updates'], sources))
        # some packages are only in proposed, even for non-devel releases
        if len(sources) == 0:
            self._add_no_published_source(source_name, release)
            return None

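        # pick the highest version by dpkg version semantics;
        # apt_pkg.version_compare() returns <0, 0 or >0 like strcmp()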
        source = sorted(sources,
                        key=functools.cmp_to_key(lambda x, y: apt_pkg.version_compare(x.source_package_version, y.source_package_version)),
                        reverse=True)[0]
        debug('Launchpad returned %s %s' % (source.source_package_name, source.source_package_version))
        return source

    def get_binarypkgs(self, pname, release, version=None):
        """ return a list of binary packages from the source package """

        # first check local cache
        #
        # if the version in the cve tracker is newer than the
        # version in the cache, we should refresh the cache
        # if there's no version in the tracker, return the cached entry
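        #
        # cache layout, as persisted to JSON by write_cache():
        #   { "<src-pkg>": { "Releases": { "<release>":
        #       { "binaries": [...], "source_version": "<ver>" } } } }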
        if pname in self.pkgcache:
            if (release in self.pkgcache[pname]['Releases'] and
                (not version or
                 apt_pkg.version_compare(version, self.pkgcache[pname]['Releases'][release].get('source_version', 0)) <= 0)):
                return self.pkgcache[pname]['Releases'][release]['binaries']

        debug('Cache miss: %s %s %s' % (pname, release, version))

        # skip lookup if unpublished_sources is empty and
        # force_reload is False
        if not self.unpublished_sources and not self.force_reload:
            if pname not in self.pkgcache or release not in self.pkgcache[pname]['Releases']:
                return None

        # skip lookup if we've already done a lookup and found no
        # published source for that release
        if self._has_no_published_source(pname, release):
            return None

        # query launchpad if not in local cache
        source = self._lookup_latest_source_package(pname, release)
        if not source:
            return None

        binaries = source.getPublishedBinaries()
        binlist = []
        for i in binaries:
            # skip if we already saw this package
            if i.binary_package_name in binlist:
                continue
            # for kernels we only want linux images
            if pname.startswith('linux') and not i.binary_package_name.startswith('linux-image-'):
                continue
            # skip ignored packages, with the exception of golang*-dev pkgs
            if i.binary_package_name.startswith('golang-go') or not any(s in i.binary_package_name for s in packages_to_ignore):
                binlist.append(i.binary_package_name)

        # save current pkgcache to local cache
        if pname not in self.pkgcache:
            self.pkgcache[pname] = {'Releases': {}}
        self.pkgcache[pname]['Releases'][release] = {'binaries': binlist, 'source_version': source.source_package_version}

        self.cache_updates += 1
        if self.cache_updates % self.cache_write_frequency == 0:
            self.write_cache()

        return binlist

# loads the USN database.json given a path to it.
# To fetch the database, run: $UCT/scripts/fetch-db database.json.bz2
def get_usn_database(usn_db_dir):
    default_usn_database = os.path.join(usn_db_dir, 'database.json')
    if not os.path.exists(default_usn_database):
        error('{} must exist'.format(default_usn_database))

    with open(default_usn_database, 'r') as database:
        return json.load(database)

# Usage:
# for a given release only:
#   ./generate-oval --usn-oval --usn-db-dir ~/usndb --usn-oval-release=focal --output-dir /tmp/oval_usn
# for all the releases:
#   ./generate-oval --usn-oval --usn-db-dir ~/usndb --output-dir /tmp/oval_usn
# for a specific release and USN-number
#   ./generate-oval --usn-oval --usn-db-dir ~/usndb --usn-oval-release=focal --usn-number=1234
# WARNING:
#  be sure the release you are passing is in the usn-number passed
#  otherwise it will generate an oval file without the usn info.
def generate_oval_usn(outdir, usn, usn_release, cve_dir, usn_db_dir, ociprefix=None, ocioutdir=None):
    # Get the usn database.json data
    usn_database = get_usn_database(usn_db_dir)
    if not usn_database:
        error("Error getting USN database.")

    if usn:
        if usn not in usn_database:
            error("Please enter a valid USN number or update your database.json and try again")

    if usn_release:
        if usn_release not in supported_releases:
            error("Please enter a valid release name.")

    # Create OvalGeneratorUSN objects
    ovals = []
    # Does the oval for just a specific given release
    if usn_release:
        ovals.append(oval_lib.OvalGeneratorUSN(usn_release, release_name(usn_release), outdir, cve_dir))
        # Also produce oval generator object for OCI
        if ocioutdir:
            ovals.append(oval_lib.OvalGeneratorUSN(usn_release, release_name(usn_release), ocioutdir,
                                                   cve_dir, ociprefix, 'oci'))
    else:
        for release in supported_releases:
            # for now we don't differentiate products (e.g. esm) in the USN DB
            _, product, _, _ = get_subproject_details(release)
            if product != PRODUCT_UBUNTU:
                continue

            ovals.append(oval_lib.OvalGeneratorUSN(release, release_name(release), outdir, cve_dir))
            # Also produce oval generator object for OCI
            if ocioutdir:
                ovals.append(oval_lib.OvalGeneratorUSN(release, release_name(release), ocioutdir,
                                                       cve_dir, ociprefix,
                                                       'oci'))

    # Generate OVAL USN data
    if usn:
        for oval in ovals:
            oval.generate_usn_oval(usn_database[usn], usn, cve_dir)
    else:
        for usn in usn_database.keys():
            for oval in ovals:
                oval.generate_usn_oval(usn_database[usn], usn, cve_dir)

    for oval in ovals:
        oval.write_oval_elements()

    return True

if __name__ == '__main__':
    main()