# -*- mode: python; coding: utf-8 -*-
# vim:smartindent cinwords=if,elif,else,for,while,try,except,finally,def,class:ts=4:sts=4:sta:et:ai:shiftwidth=4
#
# Copyright ©  2004 Canonical Ltd.
#	Author: Robert Collins <robertc@robertcollins.net>

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

from PQMConfigParser import ConfigParser
import config_manager
from copy import copy
import logging
import os
import popen2
import re
import shutil
import stat
import string
import sys
import time
import urllib
import email

# These need thought and layer - but for now, are just moved
# from the binary script intact. They should be made class or
# instance scope, but there wasn't a trivial way while moving
# them from the script.
# Path of the GPG keyring handed to GPGSigVerifier in verify_sig();
# replaced by startup code before signatures are checked.
keyring = None
# Signature ids already seen, consulted by verify_sig() to detect replays.
used_transactions = {}
# Name/path of the GNU patch executable (presumably used by do_patch,
# which is defined outside this view — confirm).
gnupatch_path = 'patch'
logger = logging # default value for simple use.
# Maps a group name to the collection of committer email addresses
# allowed for targets that set 'commiters' (see CommandRunner.get_wd).
groups = {}

def get_queuedir(config_parser, logger, args):
    """Get the queuedir that should be used from the config.

    The config file setting wins; failing that the first command line
    argument is used; with neither, an error is logged and the process
    exits with status 1.
    """
    if config_parser.has_option('DEFAULT', 'queuedir'):
        configured = config_parser.get('DEFAULT', 'queuedir')
        return os.path.abspath(os.path.expanduser(configured))
    if args:
        return os.path.abspath(args[0])
    logger.error("No queuedir specified on command line or"
                 " in config_parser files.")
    sys.exit(1)
        
# Candidate configuration files: the system-wide file first, then the
# per-user variants (historical arch/tla names included).
configfile_names = ['/etc/pqm.conf'] + [
    os.path.expanduser(name)
    for name in ('~/.pqm.conf', '~/.arch-pqm.conf', '~/.tla-pqm.conf')]

class Script(object):
    """A command script submitted by mail and stored on disk.

    The message is read lazily on first access; accessors all funnel
    through _read().
    """

    # Raw strings avoid invalid-escape issues in the regex patterns.
    # Blank lines and PGP armour markers recognised by getLines().
    whitespace_re = re.compile(r'^\s*$')
    pgp_re = re.compile(r'^-----BEGIN PGP.*MESSAGE')
    pgp_end_re = re.compile(r'^-----BEGIN PGP SIG')

    def __init__(self, filename, logger, verify_sigs, submission_time):
        """Create a script for a given file.

        :param filename: path to the on-disk mail message.
        :param logger: logger used for progress messages.
        :param verify_sigs: when true, the GPG signature is checked on read.
        :param submission_time: timestamp the script was submitted at.
        """
        self.filename = filename
        self.logger = logger
        self.verify_sigs = verify_sigs
        self.readComplete = False
        self.submission_time = submission_time

    def _read(self):
        """Read in the script details."""
        #  This is cruft from the binary script. It actually does need
        #  to read the file twice with the current layering - yes thats funky, but
        #  not something I plan on breaking in an ad hoc manner right now. The
        #  read_email and email.message_from_file and verify_sig functions need
        #  rearranging and consolidating to allow this.
        # Close the file handles explicitly rather than leaking them until
        # garbage collection.
        fp = open(self.filename)
        try:
            details = read_email(self.logger, fp)
        finally:
            fp.close()
        (self.sender, self.subject, self.msg, self.sig) = details
        if self.verify_sigs:
            sigid, siguid = verify_sig(self.sender,
                                       self.msg,
                                       self.sig,
                                       0,
                                       self.logger)
        fp = open(self.filename)
        try:
            self.msg = email.message_from_file(fp)
        finally:
            fp.close()
        self.sender = self.msg['From']
        self.logger.info('sender: %s', self.sender)
        self.logger.info('subject: %s', self.subject)
        self.readComplete = True

    def getSender(self):
        """Get the sender of the script."""
        if not self.readComplete:
            self._read()
        return self.sender

    def getSubject(self):
        """Get the subject of the script."""
        if not self.readComplete:
            self._read()
        return self.subject

    def getContent(self):
        """Get the raw content for folk that want it."""
        if not self.readComplete:
            self._read()
        return self.msg.get_payload()

    def getLines(self):
        """Get the lines in the script.

        Blank lines are dropped, the PGP armour header (marker plus its two
        following lines) is skipped, and reading stops at the signature
        block.
        """
        if not self.readComplete:
            self._read()
        to_skip = 0
        result = []
        for line in self.msg.get_payload().split('\n'):
            if to_skip:
                # Still inside the PGP armour header.
                to_skip -= 1
                continue
            if self.whitespace_re.match(line):
                continue
            if self.pgp_re.match(line):
                # Skip the hash line and the blank line that follow the
                # "BEGIN PGP ... MESSAGE" marker.
                to_skip = 2
                continue
            if self.pgp_end_re.match(line):
                break
            result.append(line)
        return result

    def getCommands(self):
        """Get the actual command lines from the script."""
        return [line for line in self.getLines() if self.isCommand(line)]

    def getSubmissionTime(self):
        """Return the time the script was submitted."""
        return self.submission_time

    def isCommand(self, line):
        """Return true if line looks like a valid command line."""
        # XXX: This is not ready yet, its still in CommandRunners guts.
        return True

def find_patches(queuedir, logger, verify_sigs):
    """Return Script objects for the queued patch files, oldest first.

    Patch files are named 'patch.<digits>' inside *queuedir*; ordering is
    by file modification time, which is taken as the submission time.
    """
    patches_re = re.compile(r'^patch\.\d+$')
    entries = []
    for name in os.listdir(queuedir):
        if not patches_re.match(name):
            continue
        fname = os.path.join(queuedir, name)
        submission_time = os.stat(fname)[stat.ST_MTIME]
        entries.append((submission_time,
                        Script(fname, logger, verify_sigs, submission_time)))
    # Sort with a key function rather than the Python-2-only cmp()
    # comparator; the sort is stable, so equal mtimes keep listing order.
    entries.sort(key=lambda entry: entry[0])
    return [entry[1] for entry in entries]
 
def read_email(logger, file=None):
    """Read an email and check it has the required structure for commands.

    :param logger: logger used for progress messages.
    :param file: file-like object to read from; defaults to stdin.
    :return: (sender, subject, text, sig) where sig is the detached
        signature payload of a multipart message, or None otherwise.
    :raises PQMException: when From or Subject is missing, or a multipart
        message is not exactly text/plain + application/pgp-signature.
    """
    if not file:
        file = sys.stdin
    msg = email.message_from_file(file)
    sender = msg['From']
    # Typo fix: was "recieved".
    logger.info("received email from %s", sender)
    subject = msg['Subject']
    if not sender:
        raise PQMException(None, "No From specified")
    if not subject:
        raise PQMException(sender, "No Subject specified")
    if not msg.is_multipart():
        return (sender, subject, msg.get_payload(), None)
    print ("Multipart support is known buggy. Patches, or better still"
           "unittest tests with or without patches solicited.")
    parts = msg.get_payload()
    if len(parts) != 2:
        raise PQMException(sender,
                    "Multipart message must have exactly two parts")
    if parts[0].get_content_type() != 'text/plain':
        raise PQMException(sender,
                    "First part of multipart message must be text/plain")
    if parts[1].get_content_type() != 'application/pgp-signature':
        raise PQMException(sender,
                    "Second part of multipart message must be"
                    " application/pgp-signature")
    return (sender,
            subject,
            parts[0].get_payload(),
            parts[1].get_payload())


class PQMException(Exception):
    """PQM specific exceptions derive from this class."""

    def __init__(self, sender, msg):
        """
        :param sender: email address to notify, or None when unknown.
        :param msg: human-readable description of the failure.
        """
        self.sender = sender
        self.msg = msg

    def __str__(self):
        # repr() replaces the Python-2-only backtick syntax.
        return repr(self.msg)

def verify_sig(sender, msg, sig, check_replay, logger):
    """Verify the GPG signature on a message.

    :param sender: submitter address, reported in raised PQMExceptions.
    :param msg: the signed message text.
    :param sig: detached signature text, or None for clearsigned input.
    :param check_replay: when true, reject signature ids seen before.
    :return: (sigid, sig_from_mail) — the SIG_ID status line and the
        signer's email address from the GOODSIG status line.
    :raises PQMException: on verification failure, missing signature id,
        or replay detection.
    """
    # NOTE(review): 'gpgv_path' and 'pqm_subdir' are module globals set
    # outside this view — confirm they are initialised before first use.
    verifier = GPGSigVerifier([keyring], gpgv=gpgv_path)
    try:
        # The message (and detached signature, if any) are written to
        # temporary files so gpgv can be pointed at them.
        tmp_msgpath = os.path.join(pqm_subdir,'tmp-msg')
        open(tmp_msgpath, 'w').write(msg)
        if sig:
            tmp_sigpath = os.path.join(pqm_subdir,'tmp-sig')
            open(tmp_sigpath, 'w').write(sig)
        else:
            tmp_sigpath = None
        output = verifier.verify(tmp_msgpath, tmp_sigpath)
        os.unlink(tmp_msgpath)
        if sig:
            os.unlink(tmp_sigpath)
    except GPGSigVerificationFailure, e:
        raise PQMException(sender, "Failed to verify signature: %s" % e._value)
    # Extract the unique signature id from gpgv's status output.
    gpgre = re.compile('^\[GNUPG:\] (SIG_ID.+)$')
    sigid = None
    for line in output:
        match = gpgre.match(line)
        if match:
            sigid = match.group(1)
            break
    if not sigid:
        raise PQMException(sender, "Couldn't determine signature timestamp")
    if check_replay and used_transactions.has_key(sigid):
        logger.error("Replay attack detected, aborting")
        raise PQMException(sender, "Replay attack detected, aborting")
    gpg_key_re = re.compile('^\[GNUPG:\] GOODSIG ([0-9A-F]+) .*<([^>]*)>.*$')
    sig_from = None
    for line in output:
        match = gpg_key_re.match(line)
        if match:
            sig_from_mail = match.group (2)
            sig_from_key = match.group (1)
            break
    # NOTE(review): if no GOODSIG line is present, sig_from_key and
    # sig_from_mail are unbound and the next statement raises NameError —
    # confirm gpgv always emits GOODSIG after a successful verify.
    logger.info ("garh %s : %s", sig_from_key, sig_from_mail)
    return sigid, sig_from_mail

class GPGSigVerificationFailure(Exception):
    """GPG Verification failed."""

    def __init__(self, value, output):
        """
        :param value: short description of the failure.
        :param output: the gpgv output lines captured before failing.
        """
        self._value = value
        self._output = output

    def __str__(self):
        # repr() replaces the Python-2-only backtick syntax.
        return repr(self._value)

    def getOutput(self):
        """Return the gpgv output lines associated with the failure."""
        return self._output

class GPGSigVerifier:
    """A class to verify GPG signatures by running gpgv in a child process."""

    def __init__(self, keyrings, gpgv=None):
        """Create a verifier.

        :param keyrings: iterable of keyring file paths passed to gpgv.
        :param gpgv: path of the gpgv binary; defaults to /usr/bin/gpgv.
        """
        self._keyrings = keyrings
        if gpgv is None:
            gpgv = '/usr/bin/gpgv'
        self._gpgv = gpgv

    def verify(self, filename, sigfilename=None):
        """Verify the signature on the file filename, detached signatures on
        sigfilename.

        :return: gpgv's combined stdout/stderr lines on success.
        :raises GPGSigVerificationFailure: when gpgv exits non-zero, is
            stopped, or is killed by a signal.
        """
        # os.pipe() returns (read end, write end); the child sends both its
        # stdout and stderr (including --status-fd output) down the pipe.
        (read_end, write_end) = os.pipe()
        pid = os.fork()
        if pid == 0:
            # Child: wire the pipe to stdout/stderr and exec gpgv.
            os.close(read_end)
            os.dup2(write_end, 1)
            os.dup2(write_end, 2)
            args = [self._gpgv]
            args.append('--status-fd=2')
            for keyring in self._keyrings:
                args.append('--keyring')
                args.append(keyring)
            if sigfilename:
                args.append(sigfilename)
            args.append(filename)
            os.execvp(self._gpgv, args)
            # execvp only returns on failure.  Was os.exit(1), which does
            # not exist (AttributeError); _exit avoids running parent
            # cleanup in the child.
            os._exit(1)
        # Parent: close the write end so EOF arrives when the child exits.
        os.close(write_end)
        reader = os.fdopen(read_end)
        output = reader.readlines()
        reader.close()
        (pid, status) = os.waitpid(pid, 0)
        if not (os.WIFEXITED(status) and os.WEXITSTATUS(status) == 0):
            if os.WIFEXITED(status):
                msg = ("gpgv exited with error code %d" %
                        os.WEXITSTATUS(status))
            elif os.WIFSTOPPED(status):
                msg = ("gpgv stopped unexpectedly with signal %d" %
                        os.WSTOPSIG(status))
            elif os.WIFSIGNALED(status):
                msg = "gpgv died with signal %d" % os.WTERMSIG(status)
            else:
                # Previously 'msg' could be unbound here (NameError).
                msg = "gpgv exited with unexpected status %r" % (status,)
            raise GPGSigVerificationFailure(msg, output)
        return output


class CommandRunner(object):
    # Command grammars: one compiled regex per command accepted in a
    # submitted script.  Most capture one or two 'repo/revision' (or
    # name/location) arguments; see run_command for the dispatch.
    star_re = re.compile('^star-merge (\S+/\S+)\s+(\S+/\S+)\s*$')
    replay_re = re.compile('^replay (\S+/\S+)\s+(\S+/\S+)\s*$')
    repo_cache_re = re.compile('^repo-cache-revision (\S+/\S+)\s*$')
    repo_uncache_re = re.compile('^repo-uncache-revision (\S+/\S+)\s*$')
    tag_re = re.compile('^tag (\S+/\S+)\s+(\S+/\S+)\s*$')
    make_repo_re = re.compile('^make-repo (\S+)\s+(\S+)\s*$')
    create_branch_re = re.compile('^create-branch (\S+/\S+)\s+(\S+/\S+)\s*$')
    create_version_re = re.compile('^create-version (\S+/\S+)\s*$')
    patch_re = re.compile('^patch (\S+/\S+)\s*$')
    debug_re = re.compile('^debug')

    def _branchspec_matches(self, branch_spec, url):
        return ((branch_spec == url and url[-1] != '/') or 
                (branch_spec.startswith(url) and url[-1] == '/' and 
                 len(branch_spec) > len(url)))

    def get_arch_impl(self):
        """Return (lazily caching) a handler for the installed arch
        implementation, probed via `<arch_path> --version`.

        Exits the process when the implementation cannot be identified.
        (Baz1_1Handler/TlaHandler/ArXHandler and popen_noshell are defined
        outside this view.)
        """
        if self.arch_impl is None:
            arch_impl = None
            (status, msg, output) = popen_noshell(self.arch_path, '--version')
            for line in output:
                if line.find('baz ') >= 0:
                    arch_impl = Baz1_1Handler()
                    break
                elif line.find('tla ') >= 0:
                    arch_impl = TlaHandler()
                    break
                elif line.find('ArX ') >= 0:
                    arch_impl = ArXHandler()
                    break
            if not arch_impl:
                logger.error("Couldn't determine arch implementation.\n"
                             "please set arch_impl\n"
                             "(output %s)\n"
                             "(msg %s)\n" % (output, msg))
                sys.exit(1)
            self.arch_impl = arch_impl
        return self.arch_impl

    def get_branch_handler(self, branchspec):
        """Return a VCS handler for *branchspec*.

        Prefers Bazaar2 when the branch exists there, otherwise falls back
        to the probed arch implementation.  (Bazaar2Handler is defined
        outside this view.)
        """
        handler = Bazaar2Handler()
        if handler.branch_exists(self.script.getSender(), branchspec):
            return handler
        handler = self.get_arch_impl()
        return handler

    def _get_url_override_mapper(self):
        """Get a URL override mapper from the config.

        Built from the '[location overrides]' config section and cached on
        first successful construction (an empty mapper for a missing
        section is deliberately not cached).
        """
        if self._url_override_mapper is not None:
            return self._url_override_mapper
        mapper = config_manager.URLMapper()
        if not self.configp.has_section('location overrides'):
            return mapper
        for target, source in self.configp.section_items('location overrides'):
            equal_pos = source.find('=')
            if (equal_pos > -1 and target.find('//') == -1 and
                source.startswith('//')):
                # : triggers separation, THANKS config-parser.  The
                # target's scheme ended up glued to the source value, so
                # reassemble "target:host" and the real source here.
                target = target + ":" + source[:equal_pos]
                source = source[equal_pos + 1:]
                # Was a bare Python-2 'print target, source' debug
                # statement; route it through the module logger instead.
                logger.info("location override %s -> %s", target, source)
            mapper.add_map(source, target)
        self._url_override_mapper = mapper
        return mapper

    def get_target_config(self, branch_spec):
        """Get the branch url we should use and its config.

        Looks *branch_spec* up in the module-level allowed_revisions
        mapping (defined outside this view), matching either a configured
        key or its 'published_at' alias.

        :return: (canonical branch_spec, config copy) with 'published_at'
            and 'publish_to' expanded to the concrete locations.
        :raises KeyError: when the branch is not configured.
        """
        for key, value in allowed_revisions.items():
            # 'in' replaces the Python-2-only dict.has_key() here and below.
            if (self._branchspec_matches(branch_spec, key) or
                ('published_at' in value and
                value['published_at'] is not None and
                self._branchspec_matches(branch_spec, value['published_at']))):
                if not branch_spec.startswith(key):
                    # The spec used the published alias; translate it back
                    # into the configured key's namespace.
                    branch_spec = key + branch_spec[len(value['published_at']):]
                config = copy(value)
                if value.get('published_at', None) is not None:
                    # calculate real published location
                    config['published_at'] = (value['published_at'] +
                                              branch_spec[len(key):])
                    if config.get('publish_to', None) is None:
                        config['publish_to'] = config['published_at']
                    else:
                        config['publish_to'] = (config['publish_to'] +
                                                branch_spec[len(key):])
                if 'publish_to' not in config:
                    config['publish_to'] = None
                return branch_spec, config
        raise KeyError("branch %s is not configured for pqm." % branch_spec)

    def get_vcs(self):
        """Return the currently selected VCS handler.

        Selected via set_current_vcs (defined outside this view); None
        until a selection has been made.
        """
        return self._vcs

    def __init__(self):
        """Set up an idle runner with no VCS selected and nothing run yet."""
        self.arch_impl = None   # lazily probed by get_arch_impl()
        self.arch_path = 'baz'  # binary invoked to probe the arch implementation
        self._vcs = None        # current VCS handler; see get_vcs()
        self.successful = []    # command lines that completed successfully
        self._url_override_mapper = None  # cache for _get_url_override_mapper()

    def run(self, script, user_email):
        """Execute every command in *script* on behalf of *user_email*.

        Each script line is dispatched through run_command (which may run
        commands immediately and/or accumulate GNU patch content); any
        accumulated patch is then applied, run through precommit, and
        committed.  Working directories are cleaned before and after.

        :return: (successful, unrecognized, output) — command lines that
            succeeded, lines not understood, and collected output lines.
        """
        self.accumulating_patch = False
        self.patch_target = None
        self.patch_content = []
        self.successful = []
        self.unrecognized = []
        self.output = []
        self.commitmsg = script.getSubject()
        self.script = script
        self.user_email=user_email
        # NOTE(review): debug is forced on here, which makes the 'debug'
        # script command redundant — confirm intent.
        self.debug = True
        self.cleanup_wd()
        logger.info("parsing commands")
        for line in self.script.getLines():
            self.run_command(line)
        if self.patch_content != []:
            self.patch_target, config = self.get_target_config(self.patch_target)
            self.set_current_vcs(self.patch_target)
            self.validate_revision(self.patch_target)
            logger.info("getting working dir for %s", self.patch_target)
            origdir = os.getcwd()
            dir = self.get_wd(script.getSender(),
                              self.patch_target,
                              config,
                              self.user_email)
            try:
                os.chdir(dir)
                try:
                    self.output += ['\n']
                    (summary, moreoutput) = self.do_patch(script.getSender(),
                                                          self.patch_content)
                    self.output += moreoutput
                    self.output += ['\n']
                except PQMTlaFailure, e:
                    raise PQMCmdFailure(script.getSender(),
                                        self.successful,
                                        'patch ' + self.patch_target,
                                        self.output + e.output)
            except:
                # Restore the original cwd before propagating any error.
                os.chdir(origdir)
                raise
            logger.info("executing patch")
            # NOTE(review): 'line' below is whatever value was left over
            # from the parsing loop above (the script's last line) — this
            # looks unintended; confirm what run_precommit should receive.
            self.output = self.run_precommit(script.getSender(),
                                             self.successful,
                                             self.patch_target,
                                             config,
                                             self.output,
                                             line, dir)
            self.successful.append('patch ' + self.patch_target)
            self.output += ['\n', 'patch succeeded at %s\n' % (time.strftime('%c')), '\n']
            os.chdir(origdir)
            self.get_vcs().commit(script.getSender(), dir, summary,
                                  self.patch_target, config)
        self.cleanup_wd()
        return (self.successful, self.unrecognized, self.output)
    
    def run_command(self, line):
        """Dispatch a single script line to the matching command handler.

        Once a 'patch <target>' header has been seen, every following line
        is collected as patch content rather than parsed.  Unrecognised
        lines are recorded in self.unrecognized.
        """
        # its a command of some sort
        patch_match = self.patch_re.match(line)
        star_match = self.star_re.match(line)
        replay_match = self.replay_re.match(line)
        repo_cache_match=self.repo_cache_re.match(line)
        repo_uncache_match=self.repo_uncache_re.match(line)
        tag_match=self.tag_re.match(line)
        create_branch_match=self.create_branch_re.match(line)
        make_repo_match=self.make_repo_re.match(line)
        create_version_match=self.create_version_re.match(line)
        debug_match = self.debug_re.match(line)
        sender = self.script.getSender()

        if patch_match:
            # GNU Patch
            logger.info("patch content found, target: %s", patch_match.group(1))
            self.patch_target = patch_match.group(1)
            self.check_target(self.patch_target, line)
            self.accumulating_patch = True
        elif self.accumulating_patch:
            # Everything after the patch header is patch body, applied
            # later by run().
            self.patch_content.append(line)
        elif star_match:
            self.do_merge(from_repo_revision=star_match.group(1),
                          to_repo_revision=star_match.group(2),
                          merge_name='star-merge',
                          merge_method="do_star_merge",
                          line=line)
        elif replay_match:
            self.do_merge(from_repo_revision=replay_match.group(1),
                          to_repo_revision=replay_match.group(2),
                          merge_name='replay',
                          merge_method="do_replay",
                          line=line)
        elif repo_cache_match:
            # Cache a revision
            repo_revision = repo_cache_match.group(1)
            (repo, revision) = repo_revision.split('/', 1)
            self.check_target(repo_revision, line)
            self.set_current_vcs(repo_revision)
            self.validate_revision(repo)
            self.output += ['\n', 'Executing repo-cache-revision %s/%s at %s\n' % (repo, revision,
                                                                               time.strftime('%c')), '\n']
            self.wrap_command(self.get_vcs().do_repo_cache, line, sender, repo, revision)
            logger.info("success: %s" % (line,))
            self.successful.append(line)
            self.output += ['\n', 'repo-cache-revision succeeded at %s\n' % (time.strftime('%c')), '\n']
        elif repo_uncache_match:
            # Uncache a revision
            repo_revision = repo_uncache_match.group(1)
            (repo, revision) = repo_revision.split('/', 1)
            self.check_target(repo_revision, line)
            self.set_current_vcs(repo_revision)
            self.validate_revision(repo)
            self.output += ['\n', 'Executing repo-uncache-revision %s/%s at %s\n' % (repo, revision,
                                                                                 time.strftime('%c')), '\n']
            self.wrap_command(self.get_vcs().do_repo_uncache, line, sender, repo, revision)
            logger.info("success: %s" % (line,))
            self.successful.append(line)
            self.output += ['\n', 'repo-uncache-revision succeeded at %s\n' % (time.strftime('%c')), '\n']
        elif tag_match:
            # Tag a branch
            from_repo_revision = tag_match.group(1)
            to_repo_revision = tag_match.group(2)
            (from_repo, from_revision) = from_repo_revision.split('/', 1)
            (to_repo, to_revision) = to_repo_revision.split('/', 1)
            self.set_current_vcs(from_repo_revision, to_repo_revision)
            self.validate_revision(from_repo_revision)
            self.validate_revision(to_repo_revision)
            self.check_target(to_repo_revision, line)
            self.output += ['\n', 'Executing tag %s/%s at %s\n' % (from_repo, from_revision,
                                                            time.strftime('%c')), '\n']
            self.wrap_command(self.get_vcs().do_tag, line, sender, from_repo,from_revision, to_repo, to_revision)
            logger.info("success: %s" % (line,))
            self.successful.append(line)
            self.output += ['\n', 'tag succeeded at %s\n' % (time.strftime('%c')), '\n']
        elif create_branch_match:
            # Create a branch
            from_repo_revision = create_branch_match.group(1)
            to_repo_revision = create_branch_match.group(2)
            self.set_current_vcs(from_repo_revision, to_repo_revision)
            self.validate_revision(from_repo_revision)
            self.validate_revision(to_repo_revision)
            self.check_target(to_repo_revision, line)
            logger.info("getting working dir for %s", to_repo_revision)
            to_repo_revision, config = self.get_target_config(to_repo_revision)
            (to_repo, to_revision) = to_repo_revision.split('/', 1)
            dir = self.get_wd(sender, to_repo_revision, config, self.user_email)
            origdir = os.getcwd()
            self.output += ['\n',
                            'Executing create-branch %s %s at %s\n'
                            % (from_repo_revision,
                               to_repo_revision,
                               time.strftime('%c')),
                            '\n']
            try:
                os.chdir(dir)
                self.wrap_command(self.get_vcs().do_create_branch, line, sender, to_repo, to_revision)
            except:
                # Restore the cwd before propagating.
                os.chdir(origdir)
                raise
            os.chdir(origdir)
            logger.info("success: %s" % (line,))
            self.successful.append(line)
            self.output += ['\n', 'create-branch succeeded at %s\n' % (time.strftime('%c')), '\n']
            # NOTE(review): cwd was already restored above; this second
            # chdir is redundant — confirm.
            os.chdir(origdir)
            self.get_vcs().commit(sender, dir, self.commitmsg, to_repo_revision, config)
        elif make_repo_match:
            # Make a repo
            repo = make_repo_match.group(1)
            location = make_repo_match.group(2)
            # make-repo needs its syntax changed - we can't probe for
            # the vcs as the target doesn't exist yet.
            self.check_target(repo, line)
            self.output += ['\n', 'Executing make-repo %s %s at %s\n' % (repo, location,
                                                                     time.strftime('%c')), '\n']
            self.wrap_command(self.get_arch_impl().do_make_repo, line, sender, repo, location)
            logger.info("success: %s" % (line,))
            self.successful.append(line)
            self.output += ['\n', 'make-repo succeeded at %s\n' % (time.strftime('%c')), '\n']
        elif create_version_match:
            # Create a new line of development
            repo_revision = create_version_match.group(1)
            (repo, revision) = repo_revision.split('/', 1)
            self.set_current_vcs(repo_revision)
            self.validate_revision(repo_revision)
            self.check_target(repo_revision, line)
            logger.info("getting working dir for %s/%s" % (repo, revision))

            dirpath=os.path.join(workdir, repo)

            if not os.access(dirpath, os.W_OK):
                os.mkdir(dirpath)

            # NOTE(review): cwd is changed here before origdir is recorded
            # below, so origdir captures dirpath, not the caller's cwd —
            # confirm this is intended.
            os.chdir(dirpath)
            dir=os.path.join(dirpath, revision)
            if os.access(dir, os.W_OK):
                raise PQMException(sender, "Working dir already exists: " + dir)
            os.mkdir(dir)

            origdir = os.getcwd()
            self.output += ['\n', 'Executing create-version %s/%s at %s\n' % (repo, revision, time.strftime('%c')), '\n']
            try:
                os.chdir(dir)
                self.wrap_command(self.get_arch_impl().do_create_version, line, sender, repo, revision)
            except:
                os.chdir(origdir)
                raise
            os.chdir(origdir)
            logger.info("success: %s" % (line,))
            self.successful.append(line)
            self.output += ['\n', 'create-version succeeded at %s\n' % (time.strftime('%c')), '\n']
            #self.get_arch_impl().commit(sender, dir, self.commitmsg, repo_revision)
        elif debug_match:
            self.debug = True
        else:
            self.unrecognized.append(line)

    def wrap_command(self, command, line, sender, *args):
        """Run command(sender, *args), appending its output to self.output.

        A PQMTlaFailure from the command is converted into PQMCmdFailure
        carrying the failed command *line* and the accumulated output.
        """
        try:
            self.output += command(sender, *args)
        except PQMTlaFailure, e:
            raise PQMCmdFailure(sender, self.successful, line, self.output + e.output)
        
    def check_commit_regex(self, branch, config):
        """Confirm that commit message matches any regexp supplied in the
        configuration file.
        """
        if not config["commit_re"]:
            # No regexp, therefore accept anything
            return
        regex = config["commit_re"]
        if re.match(regex, self.commitmsg):
            # Regexp matched, accept the commitmsg
            return
        raise PQMException(self.script.getSender(),
                           "Commit message [%s] does not match commit_re [%s]"
                           % (self.commitmsg, regex)
                           )

    def check_target(self, branch, line):
        """Check that *branch* is configured as a pqm target.

        Raises PQMCmdFailure (reporting the command *line*) when
        get_target_config does not know the branch.  NOTE(review): despite
        the failure message, this checks only configuration, not the
        sender's identity — sender authorisation is the FIXME below.
        """
        # FIXME check gpg etc etc.
        try:
            self.get_target_config(branch)
        except KeyError:
            raise PQMCmdFailure(self.script.getSender(),
                                self.successful,
                                line,
                                ["Sender not authorised to commit to branch %s\n"
                                    % branch])

    def do_merge(self, from_repo_revision, to_repo_revision, merge_name, merge_method, line):
        """Merge *from_repo_revision* into *to_repo_revision* and commit.

        :param merge_name: human-readable command name for output/logging
            (e.g. 'star-merge' or 'replay').
        :param merge_method: name of the VCS-handler method implementing
            the merge, looked up with getattr.
        :param line: the original command line, reported on failure.
        """
        sender = self.script.getSender()
        # Star-merge
        self.check_target(to_repo_revision, line)
        to_repo_revision, config = self.get_target_config(to_repo_revision)
        self.set_current_vcs(from_repo_revision, to_repo_revision)
        self.validate_revision(from_repo_revision)
        self.validate_revision(to_repo_revision)
        self.check_commit_regex(to_repo_revision, config)
        logger.info("getting working dir for %s", to_repo_revision)
        logger.info("current cwd is %s", os.getcwd())
        dir = self.get_wd(sender, to_repo_revision, config, self.user_email)
        origdir = os.getcwd()
        self.output += ['\n',
                        'Executing %s %s at %s\n' % (merge_name,
                                                     from_repo_revision,
                                                     time.strftime('%c')),
                        '\n']
        self.wrap_command(getattr(self.get_vcs(), merge_method), line, sender,
                                  from_repo_revision, dir)
        self.output = self.run_precommit(sender, self.successful,
                                         to_repo_revision, config,
                                         self.output, line, dir)
        os.chdir(origdir)
        logger.info("success: %s" % (line,))
        self.successful.append(line)
        self.output += ['\n', '%s succeeded at %s\n' % (merge_name, time.strftime('%c')), '\n']
        self.get_vcs().commit(sender, dir, self.commitmsg, to_repo_revision, config)

    def cleanup_wd(self):
        """Cleans up all possible working dirs."""
        logger.info("cleaning working directory")
        # NOTE(review): 'workdir' and 'allowed_revisions' are module
        # globals defined outside this view.
        for top in os.listdir(workdir):
            self.rm_rf(os.path.join(workdir, top))
        # Also clear any per-target 'build_dir' scratch areas.
        for branch, value in allowed_revisions.items():
            possible_dir = value['build_dir']
            if not possible_dir:
                continue
            if not os.path.exists(possible_dir):
                continue
            for top in os.listdir(possible_dir):
                self.rm_rf(os.path.join(possible_dir, top))

    def get_wd(self, sender, branch, config, user_email):
        """Prepare and return a working directory for *branch*.

        Enforces the optional 'commiters' group restriction, honours a
        per-target 'build_dir', and — when a 'build_config' is set —
        builds that config and returns the path of the branch within it.

        :raises PQMException: when *user_email* is not in the permitted
            committers group.
        """
        dirpath = self._make_wd_path(workdir, branch)
        # 'commiters' (sic — spelling matches the config key) names an
        # entry in the module-level 'groups' mapping.
        commiters = config['commiters']
        if commiters and not user_email in groups[commiters]:
            logger.error("%s is not permitted to commit to %s",
                         user_email,
                         branch)
            raise PQMException(sender,
                               "%s is not permitted to commit to %s" % (
                                    user_email, branch))
        possible_dir = config['build_dir']
        if possible_dir:
            dirpath = self._make_wd_path(possible_dir, branch)
        build_config = config['build_config']
        fullpath = self.prep_wd(sender, dirpath, branch, build_config)
        if build_config:
            # Match against both the canonical branch and its published
            # alias when locating it inside the built config.
            branch_list = [branch]
            if config.get('published_at', None):
                branch_list.append(config['published_at'])
            return self.branch_from_config(build_config, sender, branch_list, fullpath)
        else:
            self.get_vcs().make_local_dir(sender, branch, fullpath)
            return fullpath

    def _make_wd_path(self, directory, branch):
        """Make the path to be used for branch in directory"""
        elements = branch.split('/')
        index = 0
        if elements[0] == '':
            index = 1
        result = os.path.join(directory, elements[index])
        if result[-1] == ':':
            result = result[:-1]
        return result

    def validate_revision(self, branch):
        if not self.get_vcs().branch_exists(self.script.getSender(), branch):
            raise PQMCmdFailure(self.script.getSender(),
                                self.successful,
                                line, 
                                self.output + self.get_vcs().last_error.output)

    def branch_from_config(self, config, sender, branch_list, fullpath):
        """build the config config in dir fullpath. Then
        find one of branch_list in the config, and
        return the path to it."""
        # TODO: probe the config path for the url to get -
        # config package isn't guaranteed arch-style.
        config_segments = config.split('/')
        config_path = config_segments.pop()
        done = False
        while not done:
            try:
                config_branch = '/'.join(config_segments)
                self.get_branch_handler(config_branch).make_local_dir(sender, config_branch, fullpath)
                done = True
            except PQMTlaFailure, e:
                if config_path == config or len(config_segments) == 1:
                    raise
                config_path = '/'.join((config_segments.pop(), config_path))
                if os.path.exists(fullpath):
                    shutil.rmtree(fullpath)
        try:
            stream = urllib.urlopen(os.path.join(fullpath, config_path))
        except IOError,e:
            import errno
            if e.errno == errno.ENOENT:
                raise PQMException(sender, "No such config '%s'" % config_path)
            else:
                raise PQMException(sender, "IOError opening configuration '%s'" % config_path)
        cm_config = config_manager.Config(fromStream=stream,
            override_mapper=self._get_url_override_mapper())
        cm_config.build(fullpath)
        if config_branch in branch_list:
            return fullpath
        for path, entry in cm_config.get_entries().items():
            # probably want a url comparison that understands url parameters
            # TODO: but that can wait.
            if (entry.url in branch_list or 
                (entry.url.startswith('arch://') and
                 entry.url[7:] in branch_list)):
                return os.path.join(fullpath, path)
        raise PQMException(sender,
                           "Branch %s not found in config" % branch)
    
    def _branch_name(self, branchspec):
        elements = branchspec.split('/')
        return elements[-1]
        
    def prep_wd(self, sender, dirpath, branch, config):
        """Create dirpath if needed and return the path of the working
        directory to use for branch inside it.

        The working dir itself is not created here; it must not already
        exist.

        :raises PQMException: if the working dir already exists.
        """
        branch_name = self._branch_name(branch)
        if not os.access(dirpath, os.W_OK):
            os.mkdir(dirpath)
        # When building from a config, name the working dir after the
        # config's first path element combined with the branch name.
        if config:
            config_head = re.split("/", config)[0]
            fullpath = os.path.join(dirpath,
                                    "%s---%s" % (config_head, branch_name))
        else:
            fullpath = os.path.join(dirpath, branch_name)
        if os.access(fullpath, os.W_OK):
            logger.error("Working dir already exists: " + fullpath)
            raise PQMException(sender, "Working dir already exists: " + fullpath)
        return fullpath
    
    def rm_rf(self, top):
        """Remove everything below top, leaving top itself in place.

        Symlinked directories are unlinked rather than descended into.
        """
        for dirpath, subdirs, filenames in os.walk(top, topdown=False):
            for filename in filenames:
                os.remove(os.path.join(dirpath, filename))
            for subdir in subdirs:
                target = os.path.join(dirpath, subdir)
                if os.path.islink(target):
                    os.remove(target)
                else:
                    os.rmdir(target)
    
    def run_in_dir(self, local_dir, method, *args):
        """Invoke method(*args) with the process cwd set to local_dir,
        restoring the previous cwd afterwards."""
        previous = os.getcwdu()
        os.chdir(local_dir)
        try:
            return method(*args)
        finally:
            os.chdir(previous)

    def run_precommit(self, sender, successful, branch, config, output, line, dir):
        """Run the pre-commit hook (if any) in dir, appending its output.

        Uses the per-branch 'precommit_hook' setting when present, falling
        back to the module-level default. Returns the accumulated output
        list.

        :raises PQMCmdFailure: if the hook exits with a non-zero status.
        """
        hook = config['precommit_hook']
        if not hook:
            # Fall back to the module-level default hook, set elsewhere in
            # this file.
            hook = precommit_hook
        if hook:
            logger.info("running precommit hook: %s" % (hook,))
            output += ['\n', 'Executing pre-commit hook %s at %s\n' % (hook, time.strftime('%c')), '\n']
            # Popen4 merges the hook's stdout and stderr into one stream.
            child = self.run_in_dir(dir, popen2.Popen4, hook)
            child.tochild.close()
            output += child.fromchild.readlines()
            ecode = child.wait()
            if not ((ecode is None) or (ecode == 0)):
                # NOTE(review): 'ecode - 255' looks like an attempt to turn
                # the raw wait status into an exit code, but for a status of
                # 256*n it yields 256*n-255 rather than n -- confirm intent.
                raise PQMCmdFailure(sender, successful, line, output + ['\npre-commit hook failed with error code %d at %s\n' % (ecode - 255, time.strftime('%c'))])
            output += ['\n', 'pre-commit hook succeeded at %s\n' % (time.strftime('%c')), '\n']
        return output
 
    def write_lines_to_fd(self, lines):
        """Fork a writer child and return a file descriptor the parent can
        read the lines from.

        The child writes each line followed by a newline into the pipe and
        exits; the parent receives the read end. The caller is responsible
        for closing the returned descriptor.
        """
        # os.pipe() returns (read_end, write_end); the names reflect the
        # role each end plays for the eventual consumer.
        (stdin, stdout) = os.pipe()
        pid = os.fork()
        if pid != 0:
            # Parent: keep only the read end and hand it back.
            os.close(stdout)
            return stdin
        # Child: keep only the write end, stream the lines, then exit
        # immediately without running parent cleanup (hence os._exit).
        os.close(stdin)
        for line in lines:
            os.write(stdout, line)
            os.write(stdout, '\n')
        os._exit(0)
    
    def do_patch(self, sender, content):
        """Apply content (a unified diff as a list of lines) with GNU patch.

        The first line must be a summary (i.e. not a patch line). The
        filenames named by the diff are validated against path escapes
        before patching. Returns (summary, patch output lines).

        :raises PQMException: on empty content, missing summary, unsafe
            filenames, or a failing patch run.
        """
        def is_patchline(line):
            return line != '' and (line[0] in ('+', '-') or line[0:2] == '@@')
        if content == []:
            raise PQMException(sender, "Empty patch content")
        if not is_patchline(content[0]):
            summary = content[0]
        else:
            raise PQMException(sender, "No summary given for patch")
        filenames = []
        for line in content:
            if line[0:4] in ('+++ ', '--- '):
                # We intentionally include the date, etc - stripping it out is too hard and error-prone
                filenames.insert(0, line[4:].strip())
        for filename in filenames:
            # Guard against an empty filename (the old code indexed
            # filename[0] and would raise IndexError instead).
            if not filename:
                raise PQMException(sender, "Empty filename in patch")
            # Reject any '..' path component. The old substring test used
            # find(...) > 0, which let a *leading* '../' escape the working
            # directory. Also fixed: the message is now actually formatted
            # (it was passed as a separate argument, never interpolated).
            if '..' in filename.split('/'):
                raise PQMException(sender,
                    "Invalid backreferencing filename in patch: %s" % filename)
            elif filename[0] == '/':
                raise PQMException(sender,
                    "Invalid absolute filename in patch: %s" % filename)
        fd = self.write_lines_to_fd(content)
        (status, msg, output) = popen_noshell_with_input(gnupatch_path, fd, '-p1', '--batch', '--no-backup-if-mismatch')
        os.close(fd)
        if not ((status is None) or (status == 0)):
            raise PQMException(sender, ["patch command \"%s\" failed (%s): %s\n" % (gnupatch_path, status, msg)] + output)
        return (summary, output)
    
    def set_current_vcs(self, branch, other_branch=None):
        """Select the VCS handler for branch as the active one.

        When other_branch is supplied, both branches must be managed by
        the same handler class (PQM cannot merge across VCS systems).

        :raises PQMException: if the two branches use different handlers.
        """
        branch_vcs = self.get_branch_handler(branch)
        if other_branch is not None:
            other_vcs = self.get_branch_handler(other_branch)
            if branch_vcs.__class__ != other_vcs.__class__:
                # Fixed message: the adjacent string literals previously
                # joined as "VCSsystems" with no space.
                raise PQMException(self.script.getSender(),
                                   ["PQM Cannot merge between different VCS "
                                    "systems. '%s'(%s) and '%s'(%s) are different.\n"
                                    % (branch, branch_vcs.__class__,
                                       other_branch, other_vcs.__class__)] + self.output)
        self._vcs = branch_vcs

class PQMCmdFailure(Exception):
    """Raised when a queued PQM command fails part-way through.

    Records the submitter, the commands that had already succeeded, the
    command that failed, and the accumulated output.
    """

    def __init__(self, sender, goodcmds, badcmd, output):
        # Keep .args in sync with the attributes so repr()/pickling carry
        # the full detail.
        self.args = (sender, goodcmds, badcmd, output)
        (self.sender, self.goodcmds,
         self.badcmd, self.output) = self.args

class PQMTlaFailure(PQMException):
    """Raised when an underlying VCS command (tla/baz/bzr) fails.

    Carries the submitter, the raw output of the failed command, and a
    stringified message built from that output.
    """

    def __init__(self, sender, output):
        self.msg = str(output)
        self.output = output
        self.sender = sender

def popen_noshell_with_input(cmd, inputfd, *args):
    """Run cmd with args directly (no shell), feeding inputfd as stdin.

    stdout and stderr are captured together. inputfd may be None, in
    which case /dev/null is used.

    Returns (status, msg, output): the raw wait status, a human-readable
    failure description ('' on success), and the captured output lines.
    """
    # Pipe ends renamed for clarity: the old (stdin, stdout) naming had
    # them reversed relative to os.pipe()'s (read, write) order.
    (read_end, write_end) = os.pipe()
    pid = os.fork()
    if pid == 0:
        # Child: wire up stdio and exec the command.
        os.close(read_end)
        if inputfd is None:
            inputfd = os.open('/dev/null', os.O_RDONLY)
        os.dup2(inputfd, 0)
        os.dup2(write_end, 1)
        os.dup2(write_end, 2)
        logger.info("running: " + ' '.join([cmd] + list(args)))
        os.execvp(cmd, [cmd] + list(args))
        # Fixed: os.exit() does not exist (AttributeError); os._exit()
        # terminates the child immediately if execvp itself failed.
        os._exit(1)
    os.close(write_end)
    output = os.fdopen(read_end).readlines()
    (pid, status) = os.waitpid(pid, 0)
    msg = ''
    if not (status is None or (os.WIFEXITED(status) and os.WEXITSTATUS(status) == 0)):
        if os.WIFEXITED(status):
            msg = "%s exited with error code %d" % (cmd, os.WEXITSTATUS(status),)
        elif os.WIFSTOPPED(status):
            msg = "%s stopped unexpectedly with signal %d" % (cmd, os.WSTOPSIG(status),)
        elif os.WIFSIGNALED(status):
            msg = "%s died with signal %d" % (cmd, os.WTERMSIG(status),)
    return (status, msg, output)

def popen_noshell(cmd, *args):
    """Run cmd without a shell, with /dev/null as its stdin."""
    return popen_noshell_with_input(cmd, None, *args)

class VCSHandler:
    """Abstract interface for version-control back ends.

    Subclasses override whichever operations their VCS supports; each
    default implementation raises PQMTlaFailure('Unsupported operation')
    so an unsupported command is reported back to the submitter rather
    than crashing PQM.
    """

    def branch_exists(self, sender, branch):
        """Return whether branch exists; must be overridden."""
        raise NotImplementedError("branch_exists must be provided by VCS"
                                  " Handlers.")

    def commit(self, sender, dir, summary, branch, config):
        """Commit the working tree in dir with the given summary."""
        raise PQMTlaFailure(sender, 'Unsupported operation')

    def do_star_merge(self, sender, from_branch, local_dir):
        """Merge from_branch into the tree at local_dir."""
        raise PQMTlaFailure(sender, 'Unsupported operation')

    def do_replay(self, sender, from_branch, local_dir):
        """Replay from_branch onto the tree at local_dir."""
        raise PQMTlaFailure(sender, 'Unsupported operation')

    def do_repo_cache(self, sender, fromrepo, fromrevision):
        """Cache a full copy of the given revision in its archive."""
        raise PQMTlaFailure(sender, 'Unsupported operation')

    def do_repo_uncache(self, sender, fromrepo, fromrevision):
        """Remove the cached copy of the given revision."""
        raise PQMTlaFailure(sender, 'Unsupported operation')

    def do_tag(self, sender, fromrepo, fromrevision, to_repo, to_revision):
        """Create to_repo/to_revision as a tag of fromrepo/fromrevision."""
        raise PQMTlaFailure(sender, 'Unsupported operation')

    def do_create_branch(self, sender, to_repo, to_revision):
        """Create a new branch to_repo/to_revision."""
        raise PQMTlaFailure(sender, 'Unsupported operation')

    def do_make_repo(self, sender, repo, location):
        """Create a new archive repo at location."""
        raise PQMTlaFailure(sender, 'Unsupported operation')

    def do_create_version(self, sender, repo, revision):
        """Create a new version repo/revision."""
        raise PQMTlaFailure(sender, 'Unsupported operation')

    def make_local_dir(self, sender, branch, output_dir):
        """Materialise branch as a local tree in output_dir."""
        raise PQMTlaFailure(sender, 'Unsupported operation')


class ArchHandler(VCSHandler):
     
    def branch_exists(self, sender, branch):
        # XXX: This isn't quite right, as makeing a new branch
        # also calls validate_revision. It really needs two 
        # separate methods
        try:
            # FIXME, check the revision/version exists
            repo, revision = branch.split('/', 1)
            self.do_whereis_repo(sender, repo)
            return True
        except PQMTlaFailure, e:
            self.last_error = e

    def commit(self, sender, dir, summary, branch, config):
        self.run_in_dir(dir, runtla, sender, 'commit', '-s', summary)
        logger.info("commit succeeded")

    def do_replay(self, sender, from_branch, local_dir):
        return self.run_in_dir(local_dir, runtla, sender, 'replay',
                               '%s' % from_branch)

    def do_whereis_repo(self, sender, repo):
        return runtla(sender, 'whereis-archive', '%s' % (repo))
    
    def run_in_dir(self, local_dir, method, *args):
        """call method after chdiring to local_dir, then chdir back."""
        orig_dir = os.getcwdu()
        try:
            os.chdir(local_dir)
            return method(*args)
        finally:
            os.chdir(orig_dir)

    def make_local_dir(self, sender, branch, output_dir):
        runtla(sender, 'get', '%s' % branch, output_dir)


class ArXHandler(ArchHandler):
    """ArX flavour of the Arch family; maps operations onto arx commands."""

    def do_star_merge(self, sender, from_branch, local_dir):
        source = '%s' % from_branch
        return self.run_in_dir(local_dir, runtla, sender, 'merge', source)

    def do_repo_cache(self, sender, fromrepo, fromrevision):
        revspec = '%s/%s' % (fromrepo, fromrevision)
        return runtla(sender, 'archive-cache', '--add', revspec)

    def do_repo_uncache(self, sender, fromrepo, fromrevision):
        revspec = '%s/%s' % (fromrepo, fromrevision)
        return runtla(sender, 'archive-cache', '--delete', revspec)

    def do_tag(self, sender, fromrepo, fromrevision, to_repo, to_revision):
        # arx takes the new tag first, then the source revision.
        target = '%s/%s' % (to_repo, to_revision)
        source = '%s/%s' % (fromrepo, fromrevision)
        return runtla(sender, 'tag', target, source)

    def do_create_branch(self, sender, to_repo, to_revision):
        return runtla(sender, 'fork', '%s/%s' % (to_repo, to_revision))

    def do_make_repo(self, sender, repo, location):
        return runtla(sender, 'make-archive', '%s' % (repo,), '%s' % (location,))

    def do_create_version(self, sender, repo, revision):
        return runtla(sender, 'init', '%s/%s' % (repo, revision))

    def do_whereis_repo(self, sender, repo):
        return runtla(sender, 'archives', '%s/' % (repo,))


class TlaHandler(ArchHandler):
    """tla flavour of the Arch family; maps operations onto tla commands."""

    def do_star_merge(self, sender, from_branch, local_dir):
        return self.run_in_dir(local_dir, runtla, sender, 'star-merge',
                               '%s' % from_branch)

    def do_repo_cache(self, sender, fromrepo, fromrevision):
        return runtla(sender, 'cacherev', '%s/%s' % (fromrepo, fromrevision))

    def do_repo_uncache(self, sender, fromrepo, fromrevision):
        return runtla(sender, 'uncacherev', '%s/%s' % (fromrepo, fromrevision))

    def do_tag(self, sender, fromrepo, fromrevision, to_repo, to_revision):
        return runtla(sender, 'tag', '%s/%s' % (fromrepo, fromrevision),
                      '%s/%s' % (to_repo, to_revision))

    def do_create_branch(self, sender, to_repo, to_revision):
        return runtla(sender, 'archive-setup', '%s/%s' % (to_repo, to_revision))

    def do_make_repo(self, sender, repo, location):
        return runtla(sender, 'make-archive', '%s' % (repo), '%s' % (location))

    def do_create_version(self, sender, repo, revision):
        # Fixed NameError: this method's parameters are (repo, revision)
        # but the body referenced the undefined names to_repo/to_revision.
        return runtla(sender, 'archive-setup', '%s/%s' % (repo, revision))
 

class BazBaseHandler(ArchHandler):
    """Behaviour shared by the baz 1.0 and 1.1 handlers."""

    def do_repo_cache(self, sender, fromrepo, fromrevision):
        revspec = '%s/%s' % (fromrepo, fromrevision)
        return runtla(sender, 'cacherev', revspec)

    def do_repo_uncache(self, sender, fromrepo, fromrevision):
        revspec = '%s/%s' % (fromrepo, fromrevision)
        return runtla(sender, 'uncacherev', revspec)

    def do_tag(self, sender, fromrepo, fromrevision, to_repo, to_revision):
        source = '%s/%s' % (fromrepo, fromrevision)
        target = '%s/%s' % (to_repo, to_revision)
        return runtla(sender, 'branch', source, target)

    def do_make_repo(self, sender, repo, location):
        return runtla(sender, 'make-archive', '%s' % (repo,), '%s' % (location,))
    

class Baz1_0Handler(BazBaseHandler):
    """baz 1.0 flavour of the Arch family."""

    def do_star_merge(self, sender, from_branch, local_dir):
        return self.run_in_dir(local_dir, runtla, sender, 'star-merge', '-t',
                               '%s' % from_branch)

    def do_create_branch(self, sender, to_repo, to_revision):
        return runtla(sender, 'archive-setup', '%s/%s' % (to_repo, to_revision))

    def do_create_version(self, sender, repo, revision):
        # Fixed NameError: this method's parameters are (repo, revision)
        # but the body referenced the undefined names to_repo/to_revision.
        return runtla(sender, 'archive-setup', '%s/%s' % (repo, revision))


class Baz1_1Handler(BazBaseHandler):
    """baz 1.1 flavour: star-merge became 'merge --star-merge'."""

    def do_star_merge(self, sender, from_branch, local_dir):
        source = '%s' % from_branch
        return self.run_in_dir(local_dir, runtla, sender, 'merge',
                               '--star-merge', source)


class Bazaar2Handler(VCSHandler):
    """VCSHandler backed by bzrlib (Bazaar-NG)."""

    def branch_exists(self, sender, branchspec):
        """Return True if branchspec opens as a bzr branch, else False.

        On failure the exception is kept in self.last_error (given an
        empty .output attribute if bzrlib's exception lacks one) so
        callers can include it in their report.
        """
        try:
            import bzrlib
        except ImportError:
            # bzrlib not installed: treat as "branch does not exist".
            return False
        from bzrlib.branch import Branch
        try:
            branch = Branch.open(branchspec)
            return True
        except bzrlib.errors.NotBranchError, e:
            self.last_error = e
            if not hasattr(e, "output"):
                self.last_error.output = []
            return False

    def commit(self, sender, dir, summary, parent_branch, config):
        """Commit the working tree in dir, then pull into parent_branch.

        If config names a 'publish_to' branch, also pull into it; if that
        publish fails, the pull into the parent is rolled back (by
        truncating its revision history by one) and PQMTlaFailure raised.
        """
        from bzrlib.branch import Branch
        import bzrlib.plugin
        import bzrlib.errors
        bzrlib.plugin.load_plugins()
        branch = Branch.open(dir)
        branch.working_tree().commit(summary, verbose=False)
        logger.info("commit succeeded")
        parent_branch = Branch.open(parent_branch)
        parent_branch.pull(branch)
        logger.info("commited '%s' to '%s'", branch, parent_branch)
        if config.get('publish_to', None) != None:
            try:
                public_branch = Branch.open(config['publish_to'])
                public_branch.pull(branch)
                logger.info("commited '%s' to '%s'", branch, public_branch)
            except (bzrlib.errors.BzrError, bzrlib.errors.BzrNewError), e:
                # Roll back the parent pull so the queue stays consistent
                # with what was actually published.
                parent_branch.set_revision_history(
                    parent_branch.revision_history()[:-1])
                raise PQMTlaFailure(sender, [str(e).splitlines(True)])

    def do_star_merge(self, sender, from_branch, local_dir):
        """Merge the tip of from_branch into the tree at local_dir.

        Raises PQMTlaFailure listing the conflicted paths if the merge
        produced conflicts; returns a success message list otherwise.
        """
        from bzrlib.merge import merge
        from bzrlib.merge_core import ApplyMerge3
        from bzrlib.branch import Branch
        branch = Branch.open(local_dir)
        merge_type = ApplyMerge3
        # base (None, None) lets bzr find the common ancestor; -1 selects
        # the latest revision of the other branch.
        base = [None, None]
        other = (from_branch, -1)
        conflicts = merge(other, base, check_clean=True, merge_type=merge_type, this_dir=local_dir)
        if conflicts:
            error_lines = ['Conflicts during merge: %s\n' % conflicts]
            for path in branch.working_tree().iter_conflicts():
                error_lines.append('%s\n' % path)
            raise PQMTlaFailure(sender, error_lines)
        return ["merge successful\n"]

    def make_local_dir(self, sender, branch_spec, output_dir):
        """Create output_dir and copy branch_spec into it as a local branch."""
        from bzrlib.branch import Branch
        from bzrlib.clone import copy_branch
        os.mkdir(output_dir)
        branch = Branch.open(branch_spec)
        copy_branch(branch, output_dir)

#arch-tag: dc99ede3-0c64-434d-ac84-305c06455a8d
