extras-buildsys/server Config.py, NONE, 1.1 ArchJob.py, 1.10, 1.11 BuildMaster.py, 1.30, 1.31 Builder.py, 1.11, 1.12 BuilderManager.py, 1.11, 1.12 EmailUtils.py, 1.1, 1.2 Makefile, 1.6, 1.7 PackageJob.py, 1.21, 1.22 Repo.py, 1.11, 1.12 User.py, 1.6, 1.7 UserInterface.py, 1.44, 1.45 main.py, 1.11, 1.12 CONFIG.py, 1.21, NONE

Daniel Williams (dcbw) fedora-extras-commits at redhat.com
Mon Aug 15 03:18:23 UTC 2005


Author: dcbw

Update of /cvs/fedora/extras-buildsys/server
In directory cvs-int.fedora.redhat.com:/tmp/cvs-serv4617/server

Modified Files:
	ArchJob.py BuildMaster.py Builder.py BuilderManager.py 
	EmailUtils.py Makefile PackageJob.py Repo.py User.py 
	UserInterface.py main.py 
Added Files:
	Config.py 
Removed Files:
	CONFIG.py 
Log Message:
2005-08-14  Dan Williams <dcbw at redhat.com>

    * Switch to a new server config file format using ConfigParser
        - Server config files are no longer Python scripts
        - Server now takes a mandatory -c option for config file location
        - Server will write out a default config file at the specified location
            if none exists
        - Each target has its own config file, by default in /etc/plague/targets/
        - CVSROOT and CVS_RSH are now target-specific, not server-wide
        - Additional Package Arches are now target-specific and reside in each
            target's config file rather than in a server-wide file
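
        For illustration, the default server config written out by
        ServerConfig.save_default_config() (see Config.py below) looks
        roughly like this excerpt; the values shown are the shipped defaults:

            [General]
            hostname = localhost

            [Directories]
            server_work_dir = /rpmbuild
            repo_dir = /repodir
            tmpdir = /tmp
            target_configs_dir = /etc/plague/targets

            [Builders]
            use_ssl = yes
            builders = 127.0.0.1:8888

            [CVS]
            use_cvs = no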

    * "Scratch" targets are now supported; ie, they are targets that do not
        contribute packages to a repository.  Use the option "scratch=yes" in
        the target's config file in the "General" section to make the target a
        scratch target.
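
        For example, making the default target a scratch target only requires
        the "General" section of its config file (sections and defaults as
        written by TargetConfig.save_default_config() below) to read:

            [General]
            name = development
            scratch = yes
            repo_script =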

    * Repository Scripts: you may now specify a script that is run after packages
        are copied to a repository.  Use the "repo_script" option in each target's
        config file to specify a script for that target/repo.  The script must
        exit with status 0 on success and > 0 on error.  The script is given
        one argument: the name of the target/repo to which packages have just
        been copied.  If the script fails, the addresses listed in
        "admin_emails" in the server config file will be mailed the script's
        output.  The script
        is also killed if it takes longer than 1 hour to complete, since packages
        cannot be built for the target while the script is running.
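
        A minimal repo script could look like the following Python sketch;
        the rsync destination and paths are purely illustrative.  It receives
        the target/repo name as its only argument, prints its output (which is
        what gets mailed to "admin_emails" on failure), and exits non-zero on
        error:

            #!/usr/bin/python
            import sys
            import commands

            target = sys.argv[1]    # name of the target/repo just updated

            # Push the updated repo tree somewhere; host and paths are examples
            cmd = "rsync -a /repodir/%s/ mirror.example.com:/repos/%s/" % (target, target)
            status, output = commands.getstatusoutput(cmd)

            print output
            if status != 0:
                sys.exit(1)
            sys.exit(0)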




--- NEW FILE Config.py ---
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Copyright 2005 Dan Williams <dcbw at redhat.com> and Red Hat, Inc.

import os
from ConfigParser import ConfigParser


class ConfigError(Exception):
    pass

class BaseConfig:
    def __init__(self, filename):
        self._filename = filename
        self._config = ConfigParser()

    def open(self, filename=None):
        if not filename:
            filename = self._filename
        if not os.path.exists(filename):
            raise ConfigError("Config file '%s' missing." % filename)
        self._config.read(filename)
        self._filename = filename

    def has_option(self, section, name):
        return self._config.has_option(section, name)

    def get_option(self, section, name):
        if not self._config.has_section(section):
            raise ConfigError("Invalid section: %s" % section)
        if self._config.has_option(section, name):
            return self._config.get(section, name)
        raise ConfigError("Config file %s does not have option: %s/%s" % (self._filename, section, name))

    def get_str(self, section, name):
        return self.get_option(section, name)

    def get_bool(self, section, name):
        opt = self.get_option(section, name)
        if type(opt) == type(""):
            if opt.lower() == 'yes' or opt.lower() == 'true':
                return True
            elif opt.lower() == 'no' or opt.lower() == 'false':
                return False
        raise ConfigError("Invalid format for %s/%s.  Should be one of [yes, no, true, false]." % (section, name))

    def get_list(self, section, name):
        opt = self.get_option(section, name)
        if type(opt) == type(""):
            if not len(opt):
                return []
            try:
                return opt.split()
            except Exception:
                pass
        raise ConfigError("Invalid format for %s/%s.  Should be a space-separate list." % (section, name))

    def get_int(self, section, name):
        opt = self.get_option(section, name)
        try:
            return int(opt)
        except Exception:
            pass
        raise ConfigError("Invalid format for %s/%s.  Should be a valid integer." % (section, name))

    def add_section(self, section):
        self._config.add_section(section)

    def set_option(self, section, name, value):
        if not self._config.has_section(section):
            self._config.add_section(section)
        self._config.set(section, name, value)

    def save(self, filename=None):
        if not filename:
            filename = self._filename
        fp = open(filename, 'w')
        self._config.write(fp)
        self._filename = filename

class ServerConfig(BaseConfig):
    def __init__(self, filename):
        BaseConfig.__init__(self, filename)
        try:
            self.open()
        except ConfigError:
            print "Config file did not exist.  Writing %s with default values." % filename
            self.save_default_config()
        self._targets = []

    def targets(self):
        return self._targets

    def load_target_configs(self):
        cfg_dir = self.get_str("Directories", "target_configs_dir")
        if not os.path.exists(cfg_dir) or not os.access(cfg_dir, os.R_OK):
            return
        # Don't ever load targets twice
        if len(self._targets) > 0:
            return
        files = os.listdir(cfg_dir)
        for f in files:
            if not f.endswith(".cfg"):
                continue
            cfg_file = os.path.join(cfg_dir, f)
            target_cfg = TargetConfig(self, cfg_file)
            self._targets.append(target_cfg)

    def save_default_config(self, filename=None):
        self.add_section("General")
        self.set_option("General", "hostname", "localhost")

        self.add_section("Directories")
        self.set_option("Directories", "server_work_dir", "/rpmbuild",)
        self.set_option("Directories", "repo_dir", "/repodir")
        self.set_option("Directories", "tmpdir", "/tmp")
        self.set_option("Directories", "target_configs_dir", "/etc/plague/targets")

        self.add_section("Email")
        self.set_option("Email", "email_from", "buildsys at foo.com")
        self.set_option("Email", "admin_emails", "")
        self.set_option("Email", "success_emails", "")

        self.add_section("Builders")
        self.set_option("Builders", "use_ssl", "yes")
        self.set_option("Builders", "builders", "127.0.0.1:8888")

        self.add_section("SSL")
        self.set_option("SSL", "server_key_and_cert", "/etc/plague/server/certs/server_key_and_cert.pem")
        self.set_option("SSL", "ca_cert", "/etc/plague/server/certs/ca_cert.pem")

        self.add_section("CVS")
        self.set_option("CVS", "use_cvs", "no")

        self.add_section("UI")
        self.set_option("UI", "use_ssl", "yes")
        self.set_option("UI", "client_ca_cert", "/etc/plague/server/certs/ui_ca_cert.pem")
        self.set_option("UI", "port", "8887")
        self.set_option("UI", "guest_allowed", "yes")
        self.set_option("UI", "log_url", "http://www.foo.com/logs/")

        self.save()


class TargetConfig(BaseConfig):
    def __init__(self, cfg, filename):
        BaseConfig.__init__(self, filename)
        try:
            self.open()
        except ConfigError:
            print "Config file did not exist.  Writing %s with default values." % filename
            self.save_default_config()
        self._parent_cfg = cfg

    def parent_cfg(self):
        return self._parent_cfg

    def addl_pkg_arches(self):
        # Return a dict mapping package name -> list of additional arches
        # to build that package for on this target
        if not self._config.has_section("Additional Package Arches"):
            return {}
        items = self._config.items("Additional Package Arches")
        addl_arches = {}
        for (pkg, arches) in items:
            if type(arches) != type("") or not len(arches):
                continue
            try:
                l = arches.split()
            except Exception:
                continue
            addl_arches[pkg] = l
        return addl_arches

    def save_default_config(self, filename=None):
        self.add_section("General")
        self.set_option("General", "name", "development")
        self.set_option("General", "scratch", "no")
        self.set_option("General", "repo_script", "")

        self.add_section("Arches")
        self.set_option("Arches", "base_arches", "i386")
        self.set_option("Arches", "optional_arches", "")

        self.add_section("Aliases")
        self.set_option("Aliases", "user_aliases", "devel")
        self.set_option("Aliases", "cvs_aliases", "devel")

        self.add_section("Additional Package Arches")
        self.set_option("Additional Package Arches", "kernel", "i686")

        self.add_section("CVS")
        self.set_option("CVS", "cvs_root", "")
        self.set_option("CVS", "cvs_rsh", "")

        self.save()
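

A rough sketch of how the server-side code consumes these classes (method
names as defined above; the config file path is only an example):

    import Config

    cfg = Config.ServerConfig("/etc/plague/server/plague-server.cfg")
    cfg.load_target_configs()

    work_dir = cfg.get_str("Directories", "server_work_dir")
    use_ssl = cfg.get_bool("Builders", "use_ssl")
    builders = cfg.get_list("Builders", "builders")

    for target_cfg in cfg.targets():
        name = target_cfg.get_str("General", "name")
        scratch = target_cfg.get_bool("General", "scratch")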


Index: ArchJob.py
===================================================================
RCS file: /cvs/fedora/extras-buildsys/server/ArchJob.py,v
retrieving revision 1.10
retrieving revision 1.11
diff -u -r1.10 -r1.11
--- ArchJob.py	8 Aug 2005 02:54:16 -0000	1.10
+++ ArchJob.py	15 Aug 2005 03:18:20 -0000	1.11
@@ -25,24 +25,15 @@
 from plague import FileDownloader
 from plague import CommonErrors
 
-# Load in the config
-execfile("/etc/plague/server/CONFIG.py")
-
-
-# SSL certificate and key filenames
-certs = {}
-certs['key_and_cert'] = config_opts['server_key_and_cert']
-certs['ca_cert'] = config_opts['ca_cert']
-certs['peer_ca_cert'] = config_opts['ca_cert']
-
 
 class ArchJob:
     """ Tracks a single build instance for a single arch on a builder """
 
-    def __init__(self, builder, server, par_job, jobid, target, arch):
+    def __init__(self, builder, cfg, server, par_job, jobid, target, arch):
         self.par_job = par_job
         self.builder = builder
         self._server = server
+        self._use_ssl = cfg.get_bool("Builders", "use_ssl")
         self.jobid = jobid
         self.status = 'running'
         self.builder_status = ''
@@ -55,6 +46,12 @@
         self._die = False
         self._die_lock = threading.Lock()
 
+        # SSL certificate and key filenames
+        self._certs = {}
+        self._certs['key_and_cert'] = cfg.get_str("SSL", "server_key_and_cert")
+        self._certs['ca_cert'] = cfg.get_str("SSL", "ca_cert")
+        self._certs['peer_ca_cert'] = cfg.get_str("SSL", "ca_cert")
+
     def _builder_finished(self):
         if self.builder_status == 'done' or self.builder_status == 'killed' or self.builder_status == 'failed' or self.builder_status == 'orphaned':
             return True
@@ -177,9 +174,9 @@
                     target_dir = os.path.join(self.par_job.get_stage_dir(), self.arch)
                     if not os.path.exists(target_dir):
                         os.makedirs(target_dir)
-                    if config_opts['ssl_builders']:
+                    if self._use_ssl:
                         dl_thread = FileDownloader.FileDownloader(self.dl_callback, url, url,
-                                    target_dir, ['.rpm', '.log'], certs)
+                                    target_dir, ['.rpm', '.log'], self._certs)
                     else:
                         dl_thread = FileDownloader.FileDownloader(self.dl_callback, url, url,
                                     target_dir, ['.rpm', '.log'], None)


Index: BuildMaster.py
===================================================================
RCS file: /cvs/fedora/extras-buildsys/server/BuildMaster.py,v
retrieving revision 1.30
retrieving revision 1.31
diff -u -r1.30 -r1.31
--- BuildMaster.py	8 Aug 2005 02:54:16 -0000	1.30
+++ BuildMaster.py	15 Aug 2005 03:18:20 -0000	1.31
@@ -23,11 +23,11 @@
 import os
 import Repo
 import copy
+import Config
 
 
 # Load in the config
 CONFIG_LOCATION = "/etc/plague/server/"
-execfile(CONFIG_LOCATION + "CONFIG.py")
 
 
 def ensure_job_db_tables(dbcx):
@@ -102,15 +102,18 @@
 
     MAX_CHECKOUT_JOBS = 5
 
-    def __init__(self, hostname, builder_manager):
+    def __init__(self, builder_manager, cfg):
         self.builder_manager = builder_manager
-        self.hostname = hostname
+        self.hostname = cfg.get_str("General", "hostname")
         self.should_stop = False
         self._paused = False
-        self.repos = {}
-        for target in config_opts['targets'].keys():
-            repo = Repo.Repo(target, builder_manager)
-            self.repos[target] = repo
+        self._cfg = cfg
+
+        self._repos = {}
+        repodir = self._cfg.get_str("Directories", "repo_dir")
+        for target_cfg in self._cfg.targets():
+            repo = Repo.Repo(target_cfg, repodir, builder_manager)
+            self._repos[repo.target()] = repo
             repo.start()
 
         self._done_queue = []
@@ -140,12 +143,8 @@
         ensure_job_db_tables(self.dbcx)
 
         self._requeue_interrupted_jobs()
-
         threading.Thread.__init__(self)
 
-    def __del__(self):
-        self.dbcx.close()
-
     def _requeue_interrupted_jobs(self):
         """ Restart interrupted jobs from our db. """
         self.curs.execute('SELECT uid FROM jobs WHERE (status!="needsign" AND status!="failed" AND status!="finished") ORDER BY uid')
@@ -192,19 +191,22 @@
 
             # Now requeue the job
             try:
-                repo = self.repos[row['target']]
+                repo = self._repos[row['target']]
             except KeyError:
-                print "%s (%s): Target '%s' not found." % (uid, row['package'], row['target'])
+                print "%s (%s): Target '%s' not found." % (uid,
+                        row['package'], row['target'])
             else:
-                job = PackageJob.PackageJob(uid, row['username'], row['package'], row['cvs_tag'], repo, self, self.hostname)
-                print "%s (%s): Restarting '%s' on target '%s'" % (uid, row['package'], row['cvs_tag'], row['target'])
+                job = PackageJob.PackageJob(uid, row['username'], row['package'],
+                        row['cvs_tag'], repo, self)
+                print "%s (%s): Restarting '%s' on target '%s'" % (uid,
+                        row['package'], row['cvs_tag'], row['target'])
                 self._building_jobs_lock.acquire()
                 self._building_jobs[uid] = job
                 self._building_jobs_lock.release()
 
     def stop(self):
         self.should_stop = True
-        for repo in self.repos.values():
+        for repo in self._repos.values():
             repo.stop()
 
     def create_job_request(self, email, package, source, target, buildreq, time):
@@ -252,7 +254,8 @@
         self._checkout_wait_queue_lock.acquire()
 
         # We allow only 5 jobs at a time in checkout stage
-        allowed_jobs = min(self.MAX_CHECKOUT_JOBS - self._checkout_num, len(self._checkout_wait_queue))
+        allowed_jobs = min(self.MAX_CHECKOUT_JOBS - self._checkout_num,
+                len(self._checkout_wait_queue))
         for i in range(allowed_jobs):
             job = self._checkout_wait_queue[i]
             self._checkout_num = self._checkout_num + 1
@@ -302,7 +305,8 @@
     def _write_job_status_to_db(self, uid, attrdict):
         sql = 'status="%s"' % attrdict['status']
         if attrdict.has_key('epoch') and attrdict.has_key('version') and attrdict.has_key('release'):
-            sql = sql + ', epoch="%s", version="%s", release="%s"' % (attrdict['epoch'], attrdict['version'], attrdict['release'])
+            sql = sql + ', epoch="%s", version="%s", release="%s"' % (attrdict['epoch'],
+                    attrdict['version'], attrdict['release'])
         if attrdict.has_key('result_msg'):
             import urllib
             sql = sql + ', result_msg="%s"' % (urllib.quote(attrdict['result_msg']))
@@ -333,7 +337,9 @@
         else:            
             try:
                 self.curs.execute('UPDATE archjobs SET status="%s", builder_status="%s", endtime=%d ' \
-                    'WHERE jobid="%s" AND parent_uid=%d' % (attrdict['status'], attrdict['builder_status'], attrdict['endtime'], uid, attrdict['parent_uid']))
+                    'WHERE jobid="%s" AND parent_uid=%d' % (attrdict['status'], 
+                    attrdict['builder_status'], attrdict['endtime'], uid,
+                    attrdict['parent_uid']))
             except sqlite.OperationalError, e:
                 print "DB Error: could not access jobs database. Reason: '%s'" % e
 
@@ -378,9 +384,9 @@
             # If two of the same job are submitted close together, we need
             # to make sure we pick the last result to get the correct one
             row = data[len(data) - 1]
-            repo = self.repos[item['target']]
+            repo = self._repos[item['target']]
             job = PackageJob.PackageJob(row['uid'], item['email'], item['package'],
-                    item['source'], repo, self, self.hostname)
+                    item['source'], repo, self)
 
             print "%s (%s): Starting tag '%s' on target '%s'" % (row['uid'], \
                     item['package'], item['source'], item['target'])


Index: Builder.py
===================================================================
RCS file: /cvs/fedora/extras-buildsys/server/Builder.py,v
retrieving revision 1.11
retrieving revision 1.12
diff -u -r1.11 -r1.12
--- Builder.py	8 Aug 2005 02:54:16 -0000	1.11
+++ Builder.py	15 Aug 2005 03:18:20 -0000	1.12
@@ -27,16 +27,7 @@
 import OpenSSL
 import ArchJob
 import EmailUtils
-
-# Load in the config
-execfile("/etc/plague/server/CONFIG.py")
-
-
-# SSL certificate and key filenames
-certs = {}
-certs['key_and_cert'] = config_opts['server_key_and_cert']
-certs['ca_cert'] = config_opts['ca_cert']
-certs['peer_ca_cert'] = config_opts['ca_cert']
+import Config
 
 
 class Builder(threading.Thread):
@@ -44,7 +35,7 @@
 
     _BUILDER_PING_INTERVAL = 60 * 5      # In seconds
 
-    def __init__(self, manager, address):
+    def __init__(self, manager, cfg, address):
         self._cur_jobid = None
         self._manager = manager
         self._jobs = {}
@@ -57,8 +48,14 @@
         self._ping_timeout = 0
         self._cur_ping_interval = self._BUILDER_PING_INTERVAL
         self._when_died = 0
+        self._server_cfg = cfg
+
+        certs = {}
+        certs['key_and_cert'] = self._server_cfg.get_str("SSL", "server_key_and_cert")
+        certs['ca_cert'] = self._server_cfg.get_str("SSL", "ca_cert")
+        certs['peer_ca_cert'] = self._server_cfg.get_str("SSL", "ca_cert")
 
-        if config_opts['ssl_builders']:
+        if self._server_cfg.get_bool("Builders", "use_ssl"):
             self._server = XMLRPCServerProxy.PlgXMLRPCServerProxy(self._address, certs)
         else:
             self._server = XMLRPCServerProxy.PlgXMLRPCServerProxy(self._address, None)
@@ -134,7 +131,8 @@
             self._server_lock.release()
             raise RuntimeError
 
-        job = ArchJob.ArchJob(self, self._server, par_job, jobid, target, arch)
+        use_ssl = self._server_cfg.get_bool("Builders", "use_ssl")
+        job = ArchJob.ArchJob(self, self._server_cfg, self._server, par_job, jobid, target, arch)
         self._jobs[jobid] = job
         self._update_cur_job()
         self._server_lock.release()
@@ -196,8 +194,9 @@
         print "Suspending builder '%s' because it timed out." % self._address
         subject = "Builder Timeout: %s" % self._address
         msg = "The builder '%s' timed out and was suspended." % self._address
-        for addr in config_opts['admin_emails']:
-            EmailUtils.email_result(addr, msg, subject)
+        sender = self._server_cfg.get_str("Email", "email_from")
+        for addr in self._server_cfg.get_list("Email", "admin_emails"):
+            EmailUtils.email_result(sender, addr, msg, subject)
 
     def _handle_builder_reactivate(self, target_arches):
         self._alive = True
@@ -212,8 +211,9 @@
   Suspended at: %s
   Re-Enabled at: %s
 """ % (self._address, time.ctime(self._when_died), time.ctime(time.time()))
-        for addr in config_opts['admin_emails']:
-            EmailUtils.email_result(addr, msg, subject)
+        sender = self._server_cfg.get_str("Email", "email_from")
+        for addr in self._server_cfg.get_list("Email", "admin_emails"):
+            EmailUtils.email_result(sender, addr, msg, subject)
         self._when_died = 0
 
     def run(self):


Index: BuilderManager.py
===================================================================
RCS file: /cvs/fedora/extras-buildsys/server/BuilderManager.py,v
retrieving revision 1.11
retrieving revision 1.12
diff -u -r1.11 -r1.12
--- BuilderManager.py	8 Aug 2005 02:54:16 -0000	1.11
+++ BuilderManager.py	15 Aug 2005 03:18:20 -0000	1.12
@@ -23,9 +23,7 @@
 import threading
 import Builder
 import EmailUtils
-
-# Load in the config
-execfile("/etc/plague/server/CONFIG.py")
+import Config
 
 
 class BuilderManager:
@@ -33,14 +31,10 @@
     Tracks individual builder instances.
     """
 
-    def __init__(self):
+    def __init__(self, cfg):
+        self._cfg = cfg
         self._builders_lock = threading.Lock()
 
-        # List of addresses of possible builders
-        self._builders_lock.acquire()
-        self.possible_builders = config_opts['builders']
-        self._builders_lock.release()
-
         self._builders = []
         self.add_new_builders()
 
@@ -75,19 +69,21 @@
     def add_new_builders(self):
         self._builders_lock.acquire()
 
-        # Load in any new builders from the config file
-        execfile("/etc/plague/server/CONFIG.py")
-        self.possible_builders = config_opts['builders']
-
-        for address in self.possible_builders:
-            # If the address is "https" but we aren't set up for SSL, exit
-            if address.startswith('https') and not config_opts['ssl_builders']:
-                print "Builder address (%s) starts with 'https', but the 'ssl_builders' option is set to False." % address
-                continue
-            elif address.startswith('http:') and config_opts['ssl_builders']:
-                print "Builder address (%s) starts with 'http', but the 'ssl_builders' option is set to True." % address
-                continue
+        builder_list = []
+        tmp_list = self._cfg.get_list("Builders", "builders")
+        prefix = "http://"
+        if self._cfg.get_bool("Builders", "use_ssl") == True:
+            prefix = "https://"
+        for addr in tmp_list:
+            # Rewrite addresses to match current builder connection method
+            if addr.startswith("http://"):
+                addr = addr[7:]
+            elif addr.startswith("https://"):
+                addr = addr[8:]
+
+            builder_list.append(prefix + addr)
 
+        for address in builder_list:
             # If the address is already in our _builders list, skip it
             skip = False
             for builder in self._builders:
@@ -97,7 +93,7 @@
                 continue
 
             # Add the builder to our build list
-            builder = Builder.Builder(self, address)
+            builder = Builder.Builder(self, self._cfg, address)
             builder.start()
             self._builders.append(builder)
 


Index: EmailUtils.py
===================================================================
RCS file: /cvs/fedora/extras-buildsys/server/EmailUtils.py,v
retrieving revision 1.1
retrieving revision 1.2
diff -u -r1.1 -r1.2
--- EmailUtils.py	25 Jul 2005 19:47:15 -0000	1.1
+++ EmailUtils.py	15 Aug 2005 03:18:20 -0000	1.2
@@ -18,18 +18,14 @@
 import smtplib
 from email.MIMEText import MIMEText
 
-# Load in the config
-CONFIG_LOCATION = "/etc/plague/server/"
-execfile(CONFIG_LOCATION + "CONFIG.py")
 
-
-def email_result(to, resultstring, subject=None):
+def email_result(sender, to, resultstring, subject=None):
     msg = MIMEText(resultstring)
     msg['Subject'] = subject
-    msg['From'] = config_opts['email_from']
+    msg['From'] = sender
     msg['To'] = to
     s = smtplib.SMTP()
     s.connect()
-    s.sendmail(config_opts['email_from'], [to], msg.as_string())
+    s.sendmail(sender, [to], msg.as_string())
     s.close()
 


Index: Makefile
===================================================================
RCS file: /cvs/fedora/extras-buildsys/server/Makefile,v
retrieving revision 1.6
retrieving revision 1.7
diff -u -r1.6 -r1.7
--- Makefile	25 Jul 2005 19:47:15 -0000	1.6
+++ Makefile	15 Aug 2005 03:18:20 -0000	1.7
@@ -14,6 +14,7 @@
 	BuilderManager.py \
 	Builder.py \
 	BuildMaster.py \
+    Config.py       \
 	EmailUtils.py \
 	PackageJob.py \
 	Repo.py \
@@ -29,7 +30,6 @@
 	$(MKDIR) -p $(OTHERINSTDIR)
 	for file in $(FILES); do $(INSTALL) -m 644 $$file $(OTHERINSTDIR)/$$file; done
 	$(MKDIR) -p $(CONFIGDIR)
-	$(INSTALL) -m 755 CONFIG.py $(CONFIGDIR)/CONFIG.py
 	$(MKDIR) -p $(CONFIGDIR)/addl_pkg_arches
 	$(INSTALL) -m 755 addl_pkg_arches/devel-addl-arches $(CONFIGDIR)/addl_pkg_arches/devel-addl-arches
 	$(MKDIR) -p $(CONFIGDIR)/certs


Index: PackageJob.py
===================================================================
RCS file: /cvs/fedora/extras-buildsys/server/PackageJob.py,v
retrieving revision 1.21
retrieving revision 1.22
diff -u -r1.21 -r1.22
--- PackageJob.py	6 Aug 2005 17:58:34 -0000	1.21
+++ PackageJob.py	15 Aug 2005 03:18:20 -0000	1.22
@@ -34,12 +34,8 @@
 import ArchJob
 from plague import ArchUtils
 
-# Load in the config
-execfile("/etc/plague/server/CONFIG.py")
-
-os.environ['CVSROOT'] = config_opts['pkg_cvs_root']
-if len(config_opts['pkg_cvs_rsh']) > 0:
-    os.environ['CVS_RSH'] = config_opts['pkg_cvs_rsh']
+CVS_CMD = "/usr/bin/cvs"
+MAKE_CMD = "/usr/bin/make"
 
 DEBUG = False
 def debugprint(stuff=''):
@@ -100,18 +96,16 @@
         return True
     return False
 
-def make_job_log_url(target, uid, name, ver, release):
+def make_job_log_url(base_url, target, uid, name, ver, release):
     if target and uid and name and ver and release:
-        return "%s/%s/%s-%s-%s-%s/" % (config_opts['log_url'], target, uid, name, ver, release)
+        return "%s/%s/%s-%s-%s-%s/" % (base_url, target, uid, name, ver, release)
     return None
 
 
 class PackageJob:
     """ Controller object for building 1 SRPM on multiple arches """
 
-    http_dir = os.path.join(config_opts['server_work_dir'], "srpm_http_dir")
-
-    def __init__(self, uid, username, package, cvs_tag, repo, buildmaster, hostname):
+    def __init__(self, uid, username, package, cvs_tag, repo, buildmaster):
         self.curstage = ''
         self.result = 'in-progress'
         self.bm = buildmaster
@@ -122,13 +116,15 @@
         self.ver = None
         self.release = None
 
-        self.hostname = hostname
+        self._target_cfg = repo.target_cfg()
+        self._server_cfg = self._target_cfg.parent_cfg()
+
         self.username = username
         self.starttime = time.time()
         self.endtime = 0
         self.target = repo.target()
         self.repo = repo
-        self.no_cvs = config_opts['use_srpm_not_cvs']
+        self.use_cvs = self._server_cfg.get_bool("CVS", "use_cvs")
         self.cvs_tag = cvs_tag
         self.result_dir = None
         self.srpm_path = None
@@ -140,8 +136,11 @@
         self._killer = None
         self._die = False
 
+        self.http_dir = os.path.join(self._server_cfg.get_str("Directories",
+                "server_work_dir"), "srpm_http_dir")
+
         first_stage = 'initialize'
-        if self.no_cvs == True:
+        if self.use_cvs == False:
             first_stage = 'prep'
         pjc = PackageJobController(self, first_stage, 'waiting')
         pjc.start()
@@ -175,44 +174,24 @@
         return self.uid
         
     def arch_handling(self, hdr):
-        # Grab additional build arches out of the Additional Package
-        # Arches file
-        apa_file_name = self.target + "addl-arches"
-        apa_file = os.path.join(config_opts['addl_package_arches_dir'], apa_file_name)
         addl_arches = []
         try:
-            f = open(apa_file, "r")
-        except IOError, e:
+            addl_arches = self._target_cfg.addl_pkg_arches()[self.name]
+        except KeyError:
             pass
-        else:
-            for line in f.readlines():
-                line = line.strip()
-                tmp_split = line.split(':')
-                if len(tmp_split) == 2:
-                    package = tmp_split[0]
-                    if package == self.name:
-                        tmp_arches = tmp_split[1]
-                        tmp_arches = tmp_arches.strip()
-                        addl_arches = tmp_arches.split(' ')
-                        break
-
-        targets = config_opts['targets']
-        buildable_arches = targets[self.target]
-
-        target_opt_arches = config_opts['target_optional_arches']
-        opt_arches = []
-        if target_opt_arches.has_key(self.target):
-            opt_arches = target_opt_arches[self.target]
+
+        base_arches = self._target_cfg.get_list("Arches", "base_arches")
+        opt_arches = self._target_cfg.get_list("Arches", "optional_arches")
 
         # Remove arches we don't support from addl_arches
         for arch in addl_arches:
             # ArchUtils.sub_arches is only used to determine which arches to build on by default,
-            # so that if we have an Additional Package Arches file that specifies
+            # so that if we have an Additional Package Arches that specifies
             # 'sparcv9' for a package that we don't try to build sparcv9 for that
             # package unless 'sparc' is also listed in our 'targets' config option.
             if ArchUtils.sub_arches.has_key(arch):
                 master_addl_arch = ArchUtils.sub_arches[arch]
-                if master_addl_arch not in buildable_arches:
+                if master_addl_arch not in base_arches:
                     addl_arches.remove(arch)
 
         ba = hdr['buildarchs']
@@ -231,7 +210,7 @@
         # is enabled for this target
         pkg_arches = []
         allowed_arches = []
-        for arch in buildable_arches:
+        for arch in base_arches:
             pkg_arches.append(arch)
             allowed_arches.append(arch)
         for arch in addl_arches:
@@ -281,13 +260,21 @@
 
         # Create the temporary checkout directory
         dirname = "%s-%s-%d" % (self.uid, self.cvs_tag, time.time())
-        self.checkout_tmpdir = os.path.join(config_opts['tmpdir'], dirname)
+        tmpdir = self._server_cfg.get_str("Directories", "tmpdir")
+        self.checkout_tmpdir = os.path.join(tmpdir, dirname)
         if os.path.exists(self.checkout_tmpdir):
             shutil.rmtree(self.checkout_tmpdir, ignore_errors=True)
         os.makedirs(self.checkout_tmpdir)
 
+        # Set up CVS environment
+        env_args = "CVSROOT='%s'" % self._target_cfg.get_str("CVS", "cvs_root")
+        cvs_rsh = self._target_cfg.get_str("CVS", "cvs_rsh")
+        if len(cvs_rsh) > 0:
+            env_args = "%s CVS_RSH='%s'" % (env_args, cvs_rsh)
+
         # Checkout the module
-        cmd = 'cd %s; %s co -r %s %s' % (self.checkout_tmpdir, config_opts['cvs_cmd'], self.cvs_tag, self.package)
+        cmd = 'cd %s; %s co -r %s %s %s' % (self.checkout_tmpdir, CVS_CMD,
+                self.cvs_tag, self.package, env_args)
         debugprint("%d: Running %s" % (self.uid, cmd))
         s, o = commands.getstatusoutput(cmd)
         if s != 0:
@@ -297,7 +284,7 @@
             # get it from CVS
             pkg_path = os.path.join(self.checkout_tmpdir, self.package)
             if not os.path.exists(os.path.join(pkg_path, "common")):
-                cmd = 'cd %s; %s co common' % (pkg_path, config_opts['cvs_cmd'])
+                cmd = 'cd %s; %s co common %s' % (pkg_path, CVS_CMD, env_args)
                 debugprint("%d: Running %s" % (self.uid, cmd))
                 s, o = commands.getstatusoutput(cmd)
                 if s != 0:
@@ -313,12 +300,11 @@
 
     def _stage_make_srpm(self):
         # Map our target (self.target) to the CVS target alias, since CVS may have
-        # different target names that we expose
-        cvs_target_map = config_opts['cvs_target_map']
-        try:
-            cvs_target = cvs_target_map[self.target]
-        except KeyError:
-            cvs_target = self.target
+        # different target names than we expose
+        cvs_target = self.target
+        cvs_aliases = self._target_cfg.get_str("Aliases", "cvs_aliases")
+        if len(cvs_aliases) > 0:
+            cvs_target = cvs_aliases
 
         self.srpm_path = None
         srpm_dir = os.path.join(self.checkout_tmpdir, self.package, cvs_target)
@@ -326,7 +312,7 @@
             msg = "could not find path %s for %s." % (srpm_dir, self.cvs_tag)
             raise PrepError(msg)
 
-        cmd = 'cd %s; %s srpm' % (srpm_dir, config_opts['make_cmd'])
+        cmd = 'cd %s; %s srpm' % (srpm_dir, MAKE_CMD)
         debugprint("%d: Running %s in %s" % (self.uid, cmd, srpm_dir))
         s, o = commands.getstatusoutput(cmd)
         if s != 0:
@@ -369,7 +355,7 @@
     def _stage_prep(self):
 
         # In SRPM-only mode, cvs_tag is path to the SRPM to build
-        if self.no_cvs:
+        if self.use_cvs == False:
             self.srpm_path = self.cvs_tag
 
         ts = rpmUtils.transaction.initReadOnlyTransaction()
@@ -391,7 +377,8 @@
 """ % (self.cvs_tag, pkg_arches, allowed_arches)
             raise PrepError(msg)
 
-        self.result_dir = self._make_stage_dir(config_opts['server_work_dir'])
+        work_dir = self._server_cfg.get_str("Directories", "server_work_dir")
+        self.result_dir = self._make_stage_dir(work_dir)
         for arch in self.archjobs.keys():
             thisdir = os.path.join(self.result_dir, arch)
             if not os.path.exists(thisdir):
@@ -404,7 +391,7 @@
         self.srpm_path = None
 
         # Remove CVS checkout and make_srpm dirs
-        if not self.no_cvs:
+        if self.use_cvs == True:
             shutil.rmtree(self.checkout_tmpdir, ignore_errors=True)
 
         self._request_arch_jobs()
@@ -414,11 +401,13 @@
     def _request_one_arch_job(self, arch, orphaned):
         # Construct SPRM URL
         srpm_http_base = self.srpm_http_path[len(self.http_dir):]
-        if config_opts['ssl_builders'] == True:
+        use_ssl = self._server_cfg.get_bool("Builders", "use_ssl")
+        if use_ssl == True:
             method = "https://"
         else:
             method = "http://"
-        srpm_url = method + self.hostname + ":8886/" + srpm_http_base
+        hostname = self._server_cfg.get_str("General", "hostname")
+        srpm_url = method + hostname + ":8886/" + srpm_http_base
         self.bm.builder_manager.request_arch_job(self, self.target, arch, srpm_url, orphaned)
 
     def _request_arch_jobs(self):
@@ -507,14 +496,15 @@
                     self._event.wait()
                 self._event.clear()
         except PrepError, e:
-            if not self.no_cvs:
+            if self.use_cvs == True:
                 shutil.rmtree(self.checkout_tmpdir, ignore_errors=True)
             subj = 'Prep Error (Job %s): %s on %s' % (self.uid, self.cvs_tag, self.target)
             self.email_result(self.username, resultstring=e.args, subject=subj)
             self._stage_failed(e.args)
         except BuildError, e:
             subj = 'Build Error (Job %s): %s on %s' % (self.uid, self.cvs_tag, self.target)
-            log_url = make_job_log_url(self.target, self.uid, self.name, self.ver, self.release)
+            base_url = self._server_cfg.get_str("UI", "log_url")
+            log_url = make_job_log_url(base_url, self.target, self.uid, self.name, self.ver, self.release)
             msg = "%s\n\n         Build logs may be found at %s\n\n" % (e.msg, log_url)
             logtail = self._get_log_tail(e.arch)
             msg = "%s\n-------------------------------------------------\n\n%s\n" % (msg, logtail)
@@ -540,7 +530,11 @@
         self._archjobs_lock.release()
 
         if completed_jobs == len(self.archjobs):
-            self._set_cur_stage('add_to_repo')
+            # Scratch targets don't contribute packages to the repo
+            if self._target_cfg.get_bool("General", "scratch") == True:
+                self._set_cur_stage('repodone')
+            else:
+                self._set_cur_stage('add_to_repo')
             return False  # Don't want to wait
 
         return True
@@ -583,6 +577,7 @@
     def _stage_add_to_repo(self):
         # Create a list of files that the repo should copy to
         # the repo dir
+        repo_dir = self._server_cfg.get_str("Directories", "repo_dir")
         for job in self.archjobs.values():
             if not job:
                 continue
@@ -592,9 +587,9 @@
                 src_file = os.path.join(self.result_dir, job.arch, f)
                 verrel = "%s-%s" % (self.ver, self.release)
                 if f.endswith(".src.rpm"):
-                    dst_path = os.path.join(config_opts['repo_dir'], self.target, self.name, verrel, "SRPM")
+                    dst_path = os.path.join(repo_dir, self.target, self.name, verrel, "SRPM")
                 else:
-                    dst_path = os.path.join(config_opts['repo_dir'], self.target, self.name, verrel, job.arch)
+                    dst_path = os.path.join(repo_dir, self.target, self.name, verrel, job.arch)
                 self.repofiles[src_file] = os.path.join(dst_path, f)
 
         self._event.clear()
@@ -619,12 +614,13 @@
 
         self._cleanup_job_files()
 
-        log_url = make_job_log_url(self.target, self.uid, self.name, self.ver, self.release)
+        base_url = self._server_cfg.get_str("UI", "log_url")
+        log_url = make_job_log_url(base_url, self.target, self.uid, self.name, self.ver, self.release)
         resultstring = resultstring + "\n     Build logs may be found at %s\n" % (log_url)
         self.email_result(self.username, resultstring)
 
         # Notify everyone else who might want to know that the build succeeded
-        for addr in config_opts['success_emails']:
+        for addr in self._server_cfg.get_list("Email", "success_emails"):
             self.email_result(addr, resultstring)
 
         self.bm.notify_job_done(self)
@@ -633,7 +629,8 @@
         """ Returns the last 30 lines of the most relevant log file """
 
         pkg_dir = "%s-%s-%s-%s" % (self.uid, self.name, self.ver, self.release)
-        log_dir = os.path.join(config_opts['server_work_dir'], self.target, pkg_dir, arch)
+        work_dir = self._server_cfg.get_str("Directories", "server_work_dir")
+        log_dir = os.path.join(work_dir, self.target, pkg_dir, arch)
         final_log = None
         build_log = "%s/%s" % (log_dir, "build.log")
         root_log = "%s/%s" % (log_dir, "root.log")
@@ -681,5 +678,6 @@
             if not name:
                 name = self.package
             subject = 'Build Result: %d - %s on %s' % (self.uid, name, self.target)
-        EmailUtils.email_result(to, resultstring, subject)
+        sender = self._server_cfg.get_str("Email", "email_from")
+        EmailUtils.email_result(sender, to, resultstring, subject)
 


Index: Repo.py
===================================================================
RCS file: /cvs/fedora/extras-buildsys/server/Repo.py,v
retrieving revision 1.11
retrieving revision 1.12
diff -u -r1.11 -r1.12
--- Repo.py	18 Jul 2005 21:11:27 -0000	1.11
+++ Repo.py	15 Aug 2005 03:18:20 -0000	1.12
@@ -19,42 +19,53 @@
 import shutil
 import time
 import commands
-
-
-# Load in the config
-execfile("/etc/plague/server/CONFIG.py")
+import popen2
+import fcntl
+import EmailUtils
 
 
 class Repo(threading.Thread):
     """ Represents an on-disk repository of RPMs and manages updates to the repo. """
 
-    def __init__(self, target, builder_manager):
+    def __init__(self, target_cfg, repodir, builder_manager):
         self._builder_manager = builder_manager
-        self._target = target
-        if not os.path.exists(config_opts['repo_dir']):
-            print "Error: Repository directory '%s' does not exist." % config_opts['repo_dir']
+        self._target_cfg = target_cfg
+        self._target = self._target_cfg.get_str("General", "name")
+        if not os.path.exists(repodir):
+            print "Error: Repository directory '%s' does not exist." % repodir
             os._exit(1)
-        self._repodir = os.path.join(config_opts['repo_dir'], target)
+        self._repodir = os.path.join(repodir, self._target)
         if not os.path.exists(self._repodir):
             os.makedirs(self._repodir)
-        self._repo_cache_dir = os.path.join(config_opts['repo_dir'], "cache", target)
+        self._repo_cache_dir = os.path.join(repodir, "cache", self._target)
         if not os.path.exists(self._repo_cache_dir):
             os.makedirs(self._repo_cache_dir)
         self._lock = threading.Lock()
         self._repo_additions = []
         self._lock_count = 0
         self._stop = False
+
+        self._pobj = None
+        self._repo_script_start = 0
+        self._repo_script = None
+        script = self._target_cfg.get_str("General", "repo_script")
+        if len(script):
+            self._repo_script = script
+
         threading.Thread.__init__(self)
 
     def target(self):
         return self._target
 
+    def target_cfg(self):
+        return self._target_cfg
+
     def request_copy(self, buildjob):
         """ Registers a BuildJob object that has files to copy to the repo """
 
         self._lock.acquire()
         self._repo_additions.append(buildjob)
-        # We enter lock level 1 here, preventing build clients from
+        # We enter lock level 1 here, preventing builders from
         # starting their 'prep' state
         if self._lock_count == 0:
             self._lock_count = 1
@@ -91,27 +102,99 @@
         if s != 0:
             print "Error: createrepo failed with exit status %d!  Output: '%s'" % (s, o)
 
+    def _run_repo_script(self):
+        cmd = "%s %s" % (self._repo_script, self._target)
+        print "Repo '%s': executing repository script %s" % (self._target, self._repo_script)
+        self._pobj = popen2.Popen4(cmd=cmd)
+        fcntl.fcntl(self._pobj.fromchild.fileno(), fcntl.F_SETFL, os.O_NONBLOCK)
+        self._repo_script_start = time.time()
+
+    def _email_repo_script_failure(self, subject):
+        server_cfg = self._target_cfg.parent_cfg()
+        admins = server_cfg.get_list("Email", "admin_emails")
+        sender = server_cfg.get_str("Email", "email_from")
+        msg = self._get_repo_script_output()
+        for addr in admins:
+            EmailUtils.email_result(sender, addr, msg, subject)
+
+    def _get_repo_script_output(self):
+        output = ""
+        while True:
+            try:
+                string = os.read(self._pobj.fromchild.fileno(), 1024)
+                if not len(string):
+                    break
+            except OSError, e:
+                break
+            output = output + string
+        return output
+
+    def _monitor_repo_script(self):
+        unlock = False
+        exit_status = self._pobj.poll()
+        if exit_status == 0:
+            print "Repo '%s': repo script %s done." % (self._target, self._repo_script)
+            unlock = True
+        elif exit_status > 0:
+            subj = "Repo '%s': repo script %s exited with error: %d." % (self._target, self._repo_script, exit_status)
+            self._email_repo_script_failure(subj)
+            print subj
+            unlock = True
+        else:
+            # If the repo script has been going for more than an hour, kill it
+            if time.time() > self._repo_script_start + (60 * 60):
+                try:
+                    os.kill(self._pobj.pid, 9)
+                except OSError:
+                    pass
+                subj = "Repo '%s': repo script %s timed out and was killed." % (self._target, self._repo_script)
+                self._email_repo_script_failure(subj)
+                print subj
+                unlock = True
+
+        if unlock:
+            self._repo_script_start = 0
+            self._lock_count = 0
+            self._pobj = None
 
     def run(self):
         while self._stop == False:
-            # We have 2 lock levels.  When the repo is in either, clients are prevented
-            # from starting their 'prep' state.  Clients may already be in the 'prep'
-            # state when we lock the repo, therefore we don't actually enter lock level
-            # 2 until all clients have finished their 'prep' state.  Only then do we
-            # copy RPMs to the repo and run createrepo on it.
+            # We have 3 lock levels:
+            #
+            # 0 - repo unlocked
+            # 1 - entered when jobs request packages to be copied to the repo;
+            #           builders blocked from entering the 'prep' state
+            # 2 - entered when no builders are currently in the 'prep' state;
+            #       packages copied to repo and createrepo is run
+            # 3 - entered when createrepo is done; repo script run
 
             prepping_builders = self._builder_manager.any_prepping_builders()
 
+            print "repo %s, count %d" % (self._target, self._lock_count)
             self._lock.acquire()
 
-            # If the lock level is 2, update the repo
+            # Level 2: update the repo
             if self._lock_count == 2:
                 print "Repo '%s': updating repository metadata..." % self._target
                 self._update_repo()
                 print "Repo '%s': Done updating." % self._target
-                self._lock_count = 0
 
-            # Enter lock level 2 if there are no build clients in the
+                # If there's a repo script for this target, enter level 3
+                if self._repo_script:
+                    self._run_repo_script()
+                    self._lock_count = 3
+                else:
+                    self._lock_count = 0
+
+            # Level 3: monitor the repo script
+            if self._lock_count == 3:
+                if self._pobj:
+                    self._monitor_repo_script()
+                else:
+                    # If for some reason self._pobj is None, unlock the repo
+                    self._lock_count = 0
+
+            # Enter lock level 2 if there are no builders in the
             # 'prep' state and we are already at lock level 1
             if not prepping_builders and self._lock_count == 1:
                 self._lock_count = 2


Index: User.py
===================================================================
RCS file: /cvs/fedora/extras-buildsys/server/User.py,v
retrieving revision 1.6
retrieving revision 1.7
diff -u -r1.6 -r1.7
--- User.py	2 Aug 2005 04:10:32 -0000	1.6
+++ User.py	15 Aug 2005 03:18:20 -0000	1.7
@@ -17,10 +17,7 @@
 
 import sqlite
 
-# Load in the config
 CONFIG_LOCATION = "/etc/plague/server/"
-execfile(CONFIG_LOCATION + "CONFIG.py")
-
 
 class User:
     def __init__(self, email, guest):
@@ -48,7 +45,8 @@
     Talks to a database of users & capabilities
     """
 
-    def __init__(self):
+    def __init__(self, cfg):
+        self._cfg = cfg
         (dbcx, curs) = get_userdb_dbcx()
         if not dbcx or not curs:
             print "Unable to open the user database.  Exiting..."
@@ -88,7 +86,7 @@
             user.user_admin = item['user_admin']
             user.server_admin = item['server_admin']
         else:
-            if config_opts['guest_allowed']:
+            if self._cfg.get_bool("UI", "guest_allowed"):
                 user = User('guest at guest', True)
         return user
 


Index: UserInterface.py
===================================================================
RCS file: /cvs/fedora/extras-buildsys/server/UserInterface.py,v
retrieving revision 1.44
retrieving revision 1.45
diff -u -r1.44 -r1.45
--- UserInterface.py	8 Aug 2005 02:54:16 -0000	1.44
+++ UserInterface.py	15 Aug 2005 03:18:20 -0000	1.45
@@ -24,12 +24,11 @@
 import exceptions
 import BuildMaster
 import PackageJob
+import Config
 from plague import AuthedXMLRPCServer
 
-# Load in the config
-CONFIG_LOCATION = "/etc/plague/server/"
-execfile(CONFIG_LOCATION + "CONFIG.py")
 
+CONFIG_LOCATION = "/etc/plague/server/"
 
 def get_dbcx():
     dbcx = None
@@ -79,43 +78,35 @@
 
 class InvalidTargetError(exceptions.Exception): pass
 
-def resolve_target(target):
-    client_target_map = config_opts['client_target_map']
-    target_dict = config_opts['targets']
-
-    resolved_target = target
-    done = True
-    for master_target in client_target_map.keys():
-        if not target_dict.has_key(master_target): # make sure we build it
-            continue # if not, move along
-        
-        if target_dict.has_key(resolved_target): # it's one of the masters
-            return resolved_target
-
-        # do the aliases
-        for alias in client_target_map[master_target]:
-            if resolved_target.lower() == alias.lower():
-                return  master_target
+def resolve_target(target, cfg):
+    for target_cfg in cfg.targets():
+        target_name = target_cfg.get_str("General", "name")
+        target_aliases = target_cfg.get_list("Aliases", "user_aliases")
+        if target.lower() == target_name.lower():
+            return target
+        for alias in target_aliases:
+            if target.lower() == alias.lower():
+                return target
 
     raise InvalidTargetError()
 
 
-
-def email_result(email, source, resultstring):
-    """send 'resultstring' to email"""
-    
-    subject = 'Enqueue Result: %s' % source
-    EmailUtils.email_result(email, resultstring, subject)
-
-
 class UserInterface:
     """
     Base UserInterface class. NO AUTHENTICATION.  Subclass this to provide some.
     """
 
-    def __init__(self, builder_manager, build_master):
+    def __init__(self, builder_manager, build_master, cfg):
         self._builder_manager = builder_manager
         self._bm = build_master
+        self._cfg = cfg
+
+    def email_result(self, to, source, resultstring):
+        """ send 'resultstring' to an email address """
+        
+        subject = 'Enqueue Result: %s' % source
+        sender = self._cfg.get_str("Email", "email_from")
+        EmailUtils.email_result(sender, to, resultstring, subject)
 
     def _wait_for_uid(self, req):
         """ Wait a bit to see if the UID comes back to us """
@@ -132,27 +123,30 @@
     def enqueue(self, email, package, cvs_tag, target, buildreq=None):
         """ Accept a job to build and stuff it into the job database """
 
-        if config_opts['use_srpm_not_cvs'] == True:
-            email_result(email, cvs_tag, "Error setting up build for %s on "\
+        if self._cfg.get_bool("CVS", "use_cvs") == False:
+            self.email_result(email, cvs_tag, "Error setting up build for %s on "\
                     "%s: this server builds SRPMs, not CVS checkouts." % (cvs_tag, target))
-            return (-1, "This build server is set up for building SRPMS only.  Use the 'enqueue_srpm' command instead.")
+            return (-1, "This build server is set up for building SRPMS only.  "\
+                        "Use the 'enqueue_srpm' command instead.", -1)
 
         if not validate_package_name(package):
-            email_result(email, cvs_tag, "Error setting up build for %s on "\
-                    "%s: Package name '%s' contained an illegal character.  Submit a bug report?" % (cvs_tag, target, package))
-            return (-1, "The package name contained an illegal character.")
+            self.email_result(email, cvs_tag, "Error setting up build for %s on "\
+                    "%s: Package name '%s' contained an illegal character.  "\
+                    "Submit a bug report?" % (cvs_tag, target, package))
+            return (-1, "The package name contained an illegal character.", -1)
 
         if not validate_cvs_tag(cvs_tag):
-            email_result(email, cvs_tag, "Error setting up build for %s on "\
-                    "%s: The CVS tag '%s' contained an illegal character.  Submit a bug report?" % (package, target, cvs_tag))
-            return (-1, "The CVS tag contained an illegal character.")
+            self.email_result(email, cvs_tag, "Error setting up build for %s on "\
+                    "%s: The CVS tag '%s' contained an illegal character.  "\
+                    "Submit a bug report?" % (package, target, cvs_tag))
+            return (-1, "The CVS tag contained an illegal character.", -1)
 
         try:
-            real_target = resolve_target(target)
+            real_target = resolve_target(target, self._cfg)
         except InvalidTargetError:
-            email_result(email, cvs_tag, "Error setting up build for %s on "\
+            self.email_result(email, cvs_tag, "Error setting up build for %s on "\
                     "%s: target does not exist." % (cvs_tag, target))
-            return (-1, "This build server does not support the target %s." % target)
+            return (-1, "This build server does not support the target %s." % target, -1)
         else:
             print "Request to enqueue '%s' tag '%s' for target '%s' (user '%s')" % (package, cvs_tag, real_target, email)
             req = self._bm.create_job_request(email, package, cvs_tag, real_target, buildreq, time.time())
@@ -163,34 +157,34 @@
     def enqueue_srpm(self, email, package, srpm_file, target, buildreq=None):
         """ Accept a job to build from SRPM file and stuff it into the job database """
 
-        if config_opts['use_srpm_not_cvs'] == False:
-            email_result(email, srpm_file, "Error setting up build for %s on "\
+        if self._cfg.get_bool("CVS", "use_cvs") == True:
+            self.email_result(email, srpm_file, "Error setting up build for %s on "\
                     "%s: this server builds CVS checkouts, not SRPMS." % (srpm_file, target))
-            return (-1, "This build server is set up for building from CVS.  Use the 'enqueue' command instead.")
+            return (-1, "This build server is set up for building from CVS.  Use the 'enqueue' command instead.", -1)
 
         if not validate_package_name(package):
-            email_result(email, srpm_file, "Error setting up build for %s on "\
+            self.email_result(email, srpm_file, "Error setting up build for %s on "\
                     "%s: Package name '%s' contained an illegal character.  Submit a bug report?" % (package, target, package))
-            return (-1, "The package name contained an illegal character.")
+            return (-1, "The package name contained an illegal character.", -1)
 
         # We limit the database field to 255 chars
         if len(srpm_file) > 255:
-            email_result(email, srpm_file, "Error setting up build for %s on "\
+            self.email_result(email, srpm_file, "Error setting up build for %s on "\
                     "%s: try using a shorter path to the SRPM (< 255 chars)." % (srpm_file, target))
-            return (-1, "Pathname to SRPM is limited to 255 characters.")
+            return (-1, "Pathname to SRPM is limited to 255 characters.", -1)
 
         srpm_file = os.path.abspath(srpm_file)
         if not srpm_file or not os.access(srpm_file, os.R_OK):
-            email_result(email, srpm_file, "Error setting up build for %s on "\
+            self.email_result(email, srpm_file, "Error setting up build for %s on "\
                     "%s: The SRPM does not exist, or is not accessible.  Remember to use absolute paths." % (srpm_file, target))
-            return (-1, "SRPM does not exist or is not accessible, remember to use absolute paths.")
+            return (-1, "SRPM does not exist or is not accessible, remember to use absolute paths.", -1)
 
         try:
-            real_target = resolve_target(target)
+            real_target = resolve_target(target, self._cfg)
         except InvalidTargetError:
-            email_result(email, srpm_file, "Error setting up build for %s on "\
+            self.email_result(email, srpm_file, "Error setting up build for %s on "\
                     "%s: target does not exist." % (srpm_file, target))
-            return (-1, "This build server does not support the target %s." % target)
+            return (-1, "This build server does not support the target %s." % target, -1)
         else:
             print "Request to enqueue '%s' file '%s' for target '%s' (user '%s')" % (package, srpm_file, real_target, email)
             req = self._bm.create_job_request(email, package, srpm_file, real_target, buildreq, time.time())
@@ -285,7 +279,7 @@
 
         if args_dict.has_key('target') and args_dict['target']:
             try:
-                real_target = resolve_target(args_dict['target'])
+                real_target = resolve_target(args_dict['target'], self._cfg)
             except InvalidTargetError:
                 return (-1, "Error: Invalid job UID.", [])
             sql_args.append('target="%s"' % real_target)
@@ -422,7 +416,10 @@
             jobrec['epoch'] = job['epoch']
             jobrec['version'] = job['version']
             jobrec['release'] = job['release']
-            log_url = PackageJob.make_job_log_url(jobrec['target'], str(uid), jobrec['package'], jobrec['version'], jobrec['release'])
+            base_url = self._server_cfg.get_str("UI", "log_url")
+            log_url = PackageJob.make_job_log_url(base_url, jobrec['target'],
+                        str(uid), jobrec['package'], jobrec['version'],
+                        jobrec['release'])
             if log_url and len(log_url):
                 jobrec['log_url'] = log_url
         if job['result_msg']:
@@ -468,8 +465,10 @@
         return (0, "Success.", builder_list)
 
     def targets(self):
-        target_dict = config_opts['targets']
-        return target_dict.keys()
+        targets = []
+        for target_cfg in self._cfg.targets():
+            targets.append(target_cfg.get_str("General", "name"))
+        return targets
 
     def pause(self, paused):
         s = "paused"


Index: main.py
===================================================================
RCS file: /cvs/fedora/extras-buildsys/server/main.py,v
retrieving revision 1.11
retrieving revision 1.12
diff -u -r1.11 -r1.12
--- main.py	22 Jul 2005 21:35:27 -0000	1.11
+++ main.py	15 Aug 2005 03:18:20 -0000	1.12
@@ -30,22 +30,19 @@
 import User
 import BuildMaster
 import BuilderManager
+import Config
 from UserInterface import UserInterfaceSSLAuth
 from UserInterface import UserInterfaceNoAuth
 
 
-# Load in the config
-execfile("/etc/plague/server/CONFIG.py")
-
-
 class AuthenticatedSSLXMLRPCServer(AuthedXMLRPCServer.AuthedSSLXMLRPCServer):
     """
     SSL XMLRPC server that authenticates clients based on their certificate.
     """
 
-    def __init__(self, address, certs):
+    def __init__(self, address, certs, cfg):
         AuthedXMLRPCServer.AuthedSSLXMLRPCServer.__init__(self, address, self.auth_cb, certs)
-        self.authenticator = User.Authenticator()
+        self.authenticator = User.Authenticator(cfg)
 
     def auth_cb(self, request, client_address):
         peer_cert = request.get_peer_certificate()
@@ -60,7 +57,7 @@
 #################################################################
 
 if __name__ == '__main__':
-    usage = "Usage: %s  [-p <pidfile>] [-l <logfile>] [-d] <hostname>" % sys.argv[0]
+    usage = "Usage: %s [-p <pidfile>] [-l <logfile>] [-d] -c <configfile>" % sys.argv[0]
     parser = OptionParser(usage=usage)
     parser.add_option("-p", "--pidfile", default=None,
         help='file to write the PID to')
@@ -68,14 +65,14 @@
         help="location of file to write log output to")
     parser.add_option("-d", "--daemon", default=False, action="store_true",
         help="daemonize (i.e., detach from the terminal)")
+    parser.add_option("-c", "--configfile", default=None,
+        help='server configuration file')
     (opts, args) = parser.parse_args()
 
-    if (not len(args) == 1):
-        print "Must specify a single hostname."
+    if not opts.configfile:
+        print "Must specify a config file."
         sys.exit(1)
 
-    hostname = args[0]
-
     if opts.daemon:
         ret=daemonize.createDaemon()
         if ret:
@@ -91,26 +88,34 @@
         sys.stdout=log
         sys.stderr=log
 
-    builder_manager = BuilderManager.BuilderManager()
+    # Load in our config, filling in with defaults if it doesn't exist
+    cfg = Config.ServerConfig(opts.configfile)
+    cfg.load_target_configs()
+    if len(cfg.targets()) == 0:
+        print "You need at least one target to do anything useful."
+        sys.exit(3)
+
+    builder_manager = BuilderManager.BuilderManager(cfg)
 
     # Create the BuildMaster thread
-    bm = BuildMaster.BuildMaster(hostname, builder_manager)
+    hostname = cfg.get_str("General", "hostname")
+    bm = BuildMaster.BuildMaster(builder_manager, cfg)
     bm.start()
 
     # Create the BuildMaster XMLRPC server
-    UI_PORT = 8887
+    port = cfg.get_int("UI", "port")
     ui = None
     try:
-        if config_opts['ssl_frontend'] == True:
+        if cfg.get_bool("UI", "use_ssl") == True:
             ui_certs = {}
-            ui_certs['key_and_cert'] = config_opts['server_key_and_cert']
-            ui_certs['ca_cert'] = config_opts['ca_cert']
-            ui_certs['peer_ca_cert'] = config_opts['ui_ca_cert']
-            ui = UserInterfaceSSLAuth(builder_manager, bm)
-            bm_server = AuthenticatedSSLXMLRPCServer((hostname, UI_PORT), ui_certs)
+            ui_certs['key_and_cert'] = cfg.get_str("SSL", "server_key_and_cert")
+            ui_certs['ca_cert'] = cfg.get_str("SSL", "ca_cert")
+            ui_certs['peer_ca_cert'] = cfg.get_str("UI", "client_ca_cert")
+            ui = UserInterfaceSSLAuth(builder_manager, bm, cfg)
+            bm_server = AuthenticatedSSLXMLRPCServer((hostname, port), ui_certs, cfg)
         else:
-            ui = UserInterfaceNoAuth(builder_manager, bm)
-            bm_server = AuthedXMLRPCServer.AuthedXMLRPCServer((hostname, UI_PORT))
+            ui = UserInterfaceNoAuth(builder_manager, bm, cfg)
+            bm_server = AuthedXMLRPCServer.AuthedXMLRPCServer((hostname, port))
     except socket.error, e:
         if e[0] == 98:      # Address already in use
             print "Error: couldn't bind to address '%s:%s'.  Is the server already running?" % (hostname, UI_PORT)
@@ -120,16 +125,16 @@
 
     # SRPM fileserver
     SRPM_SERVER_PORT = 8886
-    http_dir = os.path.join(config_opts['server_work_dir'], "srpm_http_dir")
+    http_dir = os.path.join(cfg.get_str("Directories", "server_work_dir"), "srpm_http_dir")
     srpm_server_certs = {}
-    if config_opts['ssl_builders']:
-        srpm_server_certs['key_and_cert'] = config_opts['server_key_and_cert']
-        srpm_server_certs['ca_cert'] = config_opts['ca_cert']
-        srpm_server_certs['peer_ca_cert'] = config_opts['ca_cert']
+    if cfg.get_bool("Builders", "use_ssl"):
+        srpm_server_certs['key_and_cert'] = cfg.get_str("SSL", "server_key_and_cert")
+        srpm_server_certs['ca_cert'] = cfg.get_str("SSL", "ca_cert")
+        srpm_server_certs['peer_ca_cert'] = cfg.get_str("SSL", "ca_cert")
     srpm_server = HTTPServer.PlgHTTPServerManager((hostname, SRPM_SERVER_PORT), http_dir, srpm_server_certs)
     srpm_server.start()
 
-    print "Build Server accepting requests on %s:%d.\n" % (hostname, UI_PORT)
+    print "Build Server accepting requests on %s:%d.\n" % (hostname, port)
     try:
         bm_server.serve_forever()
     except KeyboardInterrupt:
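
With the new option parsing, main.py must be pointed at a server config file
explicitly; for example (paths illustrative):

    python main.py -c /etc/plague/server/plague-server.cfg -p /var/run/plague-server.pid -d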


--- CONFIG.py DELETED ---
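
CONFIG.py (removed above, along with the execfile() call in main.py) is
superseded by an INI-style file loaded through Config.ServerConfig.  Based
purely on the section/key names referenced in the diffs above, a server config
might look roughly like the following (values are placeholders; the real
defaults are written out by Config.ServerConfig):

    [General]
    hostname = buildsys.example.com

    [UI]
    port = 8887
    use_ssl = yes
    client_ca_cert = /etc/plague/server/certs/ui_ca_cert.pem
    log_url = http://buildsys.example.com/logs

    [SSL]
    server_key_and_cert = /etc/plague/server/certs/server_key_and_cert.pem
    ca_cert = /etc/plague/server/certs/ca_cert.pem

    [Builders]
    use_ssl = yes

    [CVS]
    use_cvs = yes

    [Directories]
    server_work_dir = /var/tmp/plague-server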



