extras-buildsys/server DBManager.py, NONE, 1.1 BuildMaster.py, 1.34, 1.35 BuilderManager.py, 1.15, 1.16 Config.py, 1.5, 1.6 Makefile, 1.9, 1.10 UserInterface.py, 1.54, 1.55 main.py, 1.13, 1.14

Daniel Williams (dcbw) fedora-extras-commits at redhat.com
Fri Sep 9 15:10:19 UTC 2005


Author: dcbw

Update of /cvs/fedora/extras-buildsys/server
In directory cvs-int.fedora.redhat.com:/tmp/cvs-serv5408/server

Modified Files:
	BuildMaster.py BuilderManager.py Config.py Makefile 
	UserInterface.py main.py 
Added Files:
	DBManager.py 
Log Message:
2005-09-09  Dan Williams  <dcbw at redhat.com>

    * Add support for PostgreSQL as the database backend.
        Databases are now abstracted and support for others,
        like MySQL, needs to be added to server/DBManager.py.
        Should be really easy though.

    NOTE: sqlite support might be broken right now due to
        SQL statement quoting changes




--- NEW FILE DBManager.py ---
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# copyright 2005 Duke University
#
# Copyright 2005 Dan Williams <dcbw at redhat.com> and Red Hat, Inc.

import os, sys
import types
import exceptions
from plague import BaseConfig
import Config


class ResultSet:
    """ Partially taken from sqlite's python DB-API module """

    def __init__(self, result, desc):
        # Write through __dict__ so attribute storage never trips over
        # our custom __getattr__.
        self.__dict__['_dbresult'] = result
        self.__dict__['_desc'] = desc
        # Map upper-cased column name -> positional index into the row.
        xlate = {}
        for idx in range(len(desc)):
            xlate[desc[idx][0].upper()] = idx
        self.__dict__['_xlatekey'] = xlate

    def __getattr__(self, key):
        # Allow row.columnname access, case-insensitively.
        upper = key.upper()
        xlate = self.__dict__['_xlatekey']
        if upper in xlate:
            return self.__dict__['_dbresult'][xlate[upper]]
        raise AttributeError(key)

    def __len__(self):
        return len(self.__dict__['_dbresult'])

    def __getitem__(self, key):
        # String keys are column names; anything else is a plain index.
        if isinstance(key, str):
            key = self.__dict__['_xlatekey'][key.upper()]
        return self.__dict__['_dbresult'][key]

    def __contains__(self, key):
        return key.upper() in self.__dict__['_xlatekey']

    def __repr__(self):
        return repr(self.__dict__['_dbresult'])

    def __str__(self):
        return str(self.__dict__['_dbresult'])

    def __cmp__(self, other):
        return cmp(self.__dict__['_dbresult'], other)

    def description(self):
        """ Return the raw DB-API cursor description this row came from. """
        return self.__dict__['_desc']

    def keys(self):
        """ Column names, in cursor order. """
        return [col[0] for col in self.__dict__['_desc']]

    def values(self):
        """ The raw row values, in cursor order. """
        return self.__dict__['_dbresult']

    def items(self):
        """ (column name, value) pairs, in cursor order. """
        desc = self.__dict__['_desc']
        row = self.__dict__['_dbresult']
        return [(desc[i][0], row[i]) for i in range(len(row))]

    def has_key(self, key):
        """ Dict-style membership test on column names (case-insensitive). """
        return key.upper() in self.__dict__['_xlatekey']


class BaseDBEngineClass:
    """ Common scaffolding for database engine wrappers.

        Concrete engines supply _connect() (returning a DB-API
        connection) and override the uid/fetch hooks below to paper
        over per-engine SQL differences.
    """

    def __init__(self, cfg):
        self._cfg = cfg
        # Subclass hook; reads engine-specific options out of self._cfg.
        self._dbcx = self._connect()

    def cursor(self):
        """ Return a new cursor on the underlying connection. """
        return self._dbcx.cursor()

    def commit(self):
        """ Commit the current transaction on the underlying connection. """
        self._dbcx.commit()

    def _must_override(self):
        # Shared failure path for all abstract hooks below.
        raise Exception("Need to implement this function for derived classes.")

    def get_uid_field_type(self):
        """ Column type for the autoincrementing 'uid' primary key. """
        self._must_override()

    def get_uid_field_for_insert(self):
        """ Field-list fragment naming the uid column in INSERT statements. """
        self._must_override()

    def get_uid_value_for_insert(self):
        """ VALUES fragment matching get_uid_field_for_insert(). """
        self._must_override()

    def fetchall(self, cursor):
        """ Fetch every row from cursor as dict-like records. """
        self._must_override()

    def fetchone(self, cursor):
        """ Fetch one row from cursor as a dict-like record, or None. """
        self._must_override()

class sqliteEngineClass(BaseDBEngineClass):

    """ sqlite will autoincrement the primary key on insert when you
        specify the field and a value of NULL """

    def get_uid_field_type(self):
        # Column type used for the 'uid' primary key in CREATE TABLE.
        return "INTEGER PRIMARY KEY"

    def get_uid_field_for_insert(self):
        # sqlite wants the uid column named explicitly in INSERTs,
        # paired with the NULL value below to trigger autoincrement.
        return "uid,"

    def get_uid_value_for_insert(self):
        # VALUES fragment matching get_uid_field_for_insert().
        return "NULL,"

    def fetchall(self, cursor):
        """ sqlite returns the items in a dict-like class already """
        return cursor.fetchall()

    def fetchone(self, cursor):
        """ sqlite returns the items in a dict-like class already """
        return cursor.fetchone()

    def _connect(self):
        # Open the database file named in the "[sqlite Engine]" config
        # section.  A missing/bad option is fatal to the server.
        try:
            dbloc = self._cfg.get_str("sqlite Engine", "database")
        except BaseConfig.ConfigError, e:
            print "Error: bad sqlite config options.  '%s'" % e
            os._exit(1)

        # Lock timeout is optional; default to 3 seconds.
        try:
            tm = self._cfg.get_int("sqlite Engine", "timeout")
        except BaseConfig.ConfigError, e:
            tm = 3

        try:
            dbcx = sqlite.connect(dbloc, encoding="utf-8", timeout=tm)
        except StandardError, e:
            print "Error: %s" % e
            os._exit(1)
        return dbcx


class pgdbEngineClass(BaseDBEngineClass):

    """ postgres will autoincrement SERIAL fields on insert automatically,
        you don't need to specify its value in INSERT statements """

    def get_uid_field_type(self):
        # Column type used for the 'uid' primary key in CREATE TABLE.
        return "SERIAL PRIMARY KEY"

    def get_uid_field_for_insert(self):
        # Empty: postgres fills SERIAL columns automatically, so the
        # uid column is simply left out of INSERT statements.
        return ""

    def get_uid_value_for_insert(self):
        # Empty to match get_uid_field_for_insert() above.
        return ""

    def fetchall(self, cursor):
        """ We need to convert the returned data to a ResultSet object so
            we can access it as a dict 
        """
        rows = cursor.fetchall()
        return [ResultSet(row, cursor.description) for row in rows]

    def fetchone(self, cursor):
        """ We need to convert the returned data to a ResultSet object so
            we can access it as a dict 
        """
        row = cursor.fetchone()
        if not row:
            return None
        return ResultSet(row, cursor.description)

    def _connect(self):
        # Pull connection parameters from the "[pgdb Engine]" config
        # section; all four are required.  Failure is fatal to the server.
        try:
            host = self._cfg.get_str("pgdb Engine", "host")
            database = self._cfg.get_str("pgdb Engine", "database")
            user = self._cfg.get_str("pgdb Engine", "user")
            password = self._cfg.get_str("pgdb Engine", "password")
        except BaseConfig.ConfigError, e:
            print "Error: bad pgdb config options.  '%s'" % e
            os._exit(1)

        try:
            dbcx = pgdb.connect(host=host, database=database, user=user, password=password)
        except StandardError, e:
            print "Error: %s" % e
            os._exit(1)

        return dbcx




# Registry of supported engines, keyed by the value of the
# "engine" option in the "[Database]" config section.
db_engines = {
    'sqlite':   sqliteEngineClass,
    'pgdb':    pgdbEngineClass
}

# Import our database engines; if an engine's python
# module isn't present, set its registry entry to None
# so DBManager can report it as unavailable.
try:
    import sqlite
except ImportError, e:
    db_engines['sqlite'] = None

try:
    import pgdb
except ImportError, e:
    db_engines['pgdb'] = None



class DBManager:
    def __init__(self, cfg):
        self._cfg = cfg
        try:
            engine = self._cfg.get_str("Database", "engine")
            self._db_engine_class = db_engines[engine]
        except KeyError:
            print "Error: unrecognized database engine '%s'.  Must be " \
                    "one of: %s" % (engine, db_engines.keys())
            os._exit(1)

        if not self._db_engine_class:
            print "Error: database engine '%s' could not be loaded.  " \
                    "Perhaps you need the python module for it?" % engine
            os._exit(1)

        print "Using database engine %s." % engine

        # Make sure our database structure is what we want
        dbcx = self.dbcx()
        if not dbcx:
            os._exit(1)
        self._ensure_tables(dbcx)
        del dbcx

    def dbcx(self):
        try:
            dbcx = self._db_engine_class(self._cfg)
        except StandardError, e:
            dbcx = None
            print "DB Error: %s" % e

        return dbcx

    def _ensure_tables(self, dbcx):
        """ Create database tables if they don't exist """

        curs = dbcx.cursor()
        try:
            curs.execute('SELECT * FROM jobs LIMIT 4')
        except Exception, e:
            # If table wasn't created, try to create it
            try:
                uid_type = dbcx.get_uid_field_type()
                dbcx.commit()
                curs.execute('CREATE TABLE jobs ('  \
                    'uid %s, '                      \
                    'username VARCHAR(20), '        \
                    'package VARCHAR(50), '         \
                    'source VARCHAR(255), '         \
                    'target_distro VARCHAR(20), '   \
                    'target_target VARCHAR(20), '   \
                    'target_repo VARCHAR(20), '     \
                    'buildreq VARCHAR(75), '        \
                    'starttime BIGINT, '            \
                    'endtime BIGINT, '              \
                    'status VARCHAR(15), '          \
                    'result VARCHAR(15), '          \
                    'epoch VARCHAR(4), '            \
                    'version VARCHAR(25), '         \
                    'release VARCHAR(25), '         \
                    'archlist VARCHAR(75), '        \
                    'result_msg TEXT'               \
                    ')' % uid_type)
                dbcx.commit()
            except StandardError, e:
                print "Could not access the job database.  Reason: '%s'.  Exiting..." % e
                os._exit(1)

        # Create the archjobs table
        try:
            curs.execute('SELECT * FROM archjobs LIMIT 4')
        except Exception, e:
            # If table wasn't created, try to create it
            try:
                dbcx.commit()
                curs.execute('CREATE TABLE archjobs ('  \
                    'jobid VARCHAR(40) PRIMARY KEY, '   \
                    'parent_uid INTEGER, '              \
                    'starttime BIGINT, '                \
                    'endtime BIGINT, '                  \
                    'arch VARCHAR(15), '                \
                    'builder_addr VARCHAR(100), '       \
                    'status VARCHAR(15), '              \
                    'builder_status VARCHAR(15)'        \
                    ')')
                dbcx.commit()
            except StandardError, e:
                print "Could not access the job database.  Reason: '%s'.  Exiting..." % e
                os._exit(1)



Index: BuildMaster.py
===================================================================
RCS file: /cvs/fedora/extras-buildsys/server/BuildMaster.py,v
retrieving revision 1.34
retrieving revision 1.35
diff -u -r1.34 -r1.35
--- BuildMaster.py	29 Aug 2005 17:48:19 -0000	1.34
+++ BuildMaster.py	9 Sep 2005 15:10:17 -0000	1.35
@@ -18,7 +18,7 @@
 
 import time
 import PackageJob
-import sqlite
+import DBManager
 import threading
 import os
 import Repo
@@ -26,85 +26,13 @@
 import Config
 
 
-# Load in the config
-CONFIG_LOCATION = "/etc/plague/server/"
-
-
-def ensure_job_db_tables(dbcx):
-    """ Central routine to create the database table structure """
-
-    curs = dbcx.cursor()
-    try:
-        curs.execute('SELECT * FROM jobs')
-        dbcx.commit()
-    except Exception, e:
-        # If DB wasn't created, try to create it
-        try:
-            curs.execute('CREATE TABLE jobs ('  \
-                'uid INTEGER PRIMARY KEY, '     \
-                'username VARCHAR(20), '        \
-                'package VARCHAR(50), '         \
-                'source VARCHAR(255), '         \
-                'target_distro VARCHAR(20), '   \
-                'target_target VARCHAR(20), '   \
-                'target_repo VARCHAR(20), '     \
-                'buildreq VARCHAR(75), '        \
-                'starttime BIGINT, '            \
-                'endtime BIGINT, '              \
-                'status VARCHAR(15), '          \
-                'result VARCHAR(15), '          \
-                'epoch VARCHAR(4), '            \
-                'version VARCHAR(25), '         \
-                'release VARCHAR(25), '         \
-                'archlist VARCHAR(75), '        \
-                'result_msg TEXT'               \
-                ')')
-        except sqlite.OperationalError, e:
-            print "Could not access the job database.  Reason: '%s'.  Exiting..." % e
-            os._exit(1)
-        except sqlite.DatabaseError, e:
-            s = "%s" % e
-            if s != "table jobs already exists":
-                print "Could not access the job database.  Reason: '%s'.  Exiting..." % e
-                os._exit(1)
-        else:
-            dbcx.commit()
-
-    # Create the archjobs table
-    try:
-        curs.execute('SELECT * FROM archjobs')
-        dbcx.commit()
-    except Exception, e:
-        # If DB wasn't created, try to create it
-        try:
-            curs.execute('CREATE TABLE archjobs ('  \
-                'jobid VARCHAR(40) PRIMARY KEY, '   \
-                'parent_uid INTEGER, '              \
-                'starttime BIGINT, '                \
-                'endtime BIGINT, '                  \
-                'arch VARCHAR(15), '                \
-                'builder_addr VARCHAR(100), '       \
-                'status VARCHAR(15), '              \
-                'builder_status VARCHAR(15)'        \
-                ')')
-        except sqlite.OperationalError, e:
-            print "Could not access the job database.  Reason: '%s'.  Exiting..." % e
-            os._exit(1)
-        except sqlite.DatabaseError, e:
-            s = "%s" % e
-            if s != "table archjobs already exists":
-                print "Could not access the job database.  Reason: '%s'.  Exiting..." % e
-                os._exit(1)
-        else:
-            dbcx.commit()
-
-
 class BuildMaster(threading.Thread):
 
     MAX_CHECKOUT_JOBS = 5
 
-    def __init__(self, builder_manager, cfg):
+    def __init__(self, builder_manager, db_manager, cfg):
         self.builder_manager = builder_manager
+        self._db_manager = db_manager
         self.hostname = cfg.get_str("General", "hostname")
         self.should_stop = False
         self._paused = False
@@ -136,22 +64,16 @@
         self._building_jobs = {}
         self._building_jobs_lock = threading.Lock()
 
-        try:
-            self.dbcx = sqlite.connect(CONFIG_LOCATION + "jobdb", encoding="utf-8", timeout=3)
-        except sqlite.DatabaseError, e:
-            print "Error: sqlite could not open the job database.  Reason: %s" % e
-            os._exit(1)
-        self.curs = self.dbcx.cursor()
-        ensure_job_db_tables(self.dbcx)
+        self._dbcx = self._db_manager.dbcx()
+        self._cursor = self._dbcx.cursor()
 
         self._requeue_interrupted_jobs()
         threading.Thread.__init__(self)
 
     def _requeue_interrupted_jobs(self):
         """ Restart interrupted jobs from our db. """
-        self.curs.execute('SELECT uid FROM jobs WHERE (status!="needsign" AND status!="failed" AND status!="finished") ORDER BY uid')
-        self.dbcx.commit()
-        uids = self.curs.fetchall()
+        self._cursor.execute("SELECT uid FROM jobs WHERE (status!='needsign' AND status!='failed' AND status!='finished') ORDER BY uid")
+        uids = self._dbcx.fetchall(self._cursor)
 
         if len(uids) == 0:
             return
@@ -178,9 +100,8 @@
         if len(uids) == 0:
             return
 
-        self.curs.execute('SELECT * FROM jobs WHERE %s ORDER BY uid' % uids)
-        self.dbcx.commit()
-        jobs = self.curs.fetchall()
+        self._cursor.execute('SELECT * FROM jobs WHERE %s ORDER BY uid' % uids)
+        jobs = self._dbcx.fetchall(self._cursor)
 
         if len(jobs) == 0:
             return
@@ -188,13 +109,13 @@
         for row in jobs:
             uid = row['uid']
             # Kill any archjobs that are left around
-            self.curs.execute('DELETE FROM archjobs WHERE parent_uid=%d' % uid)
-            self.dbcx.commit()
+            self._cursor.execute('DELETE FROM archjobs WHERE parent_uid=%d' % uid)
+            self._dbcx.commit()
 
             # Clear out old time/result information
-            self.curs.execute('UPDATE jobs SET starttime=%d, endtime=0, ' \
-                              'result_msg="" WHERE uid=%d' % (time.time(), uid))
-            self.dbcx.commit()
+            self._cursor.execute("UPDATE jobs SET starttime=%d, endtime=0, " \
+                              "result_msg='' WHERE uid=%d" % (time.time(), uid))
+            self._dbcx.commit()
 
             # Now requeue the job
             target_str = Config.make_target_string(row['target_distro'],
@@ -299,10 +220,10 @@
 
             # Update job end time
             try:
-                self.curs.execute('UPDATE jobs SET endtime=%d WHERE uid=%d' % (job.endtime, uid))
-            except sqlite.OperationalError, e:
+                self._cursor.execute('UPDATE jobs SET endtime=%d WHERE uid=%d' % (job.endtime, uid))
+            except StandardError, e:
                 print "DB Error: could not access jobs database. Reason: '%s'" % e
-            self.dbcx.commit()
+            self._dbcx.commit()
 
             print "%s (%s): Job finished." % (uid, job.package)
 
@@ -314,45 +235,42 @@
         self._done_queue_lock.release()
 
     def _write_job_status_to_db(self, uid, attrdict):
-        sql = 'status="%s"' % attrdict['status']
+        sql = "status='%s'" % attrdict['status']
         if attrdict.has_key('epoch') and attrdict.has_key('version') and attrdict.has_key('release'):
-            sql = sql + ', epoch="%s", version="%s", release="%s"' % (attrdict['epoch'],
+            sql = sql + ", epoch='%s', version='%s', release='%s'" % (attrdict['epoch'],
                     attrdict['version'], attrdict['release'])
         if attrdict.has_key('result_msg'):
             import urllib
-            sql = sql + ', result_msg="%s"' % (urllib.quote(attrdict['result_msg']))
+            sql = sql + ", result_msg='%s'" % (urllib.quote(attrdict['result_msg']))
         if attrdict.has_key('result'):
-            sql = sql + ', result="%s"' % attrdict['result']
+            sql = sql + ", result='%s'" % attrdict['result']
 
         sql = 'UPDATE jobs SET ' + sql + ' WHERE uid=%d' % uid
         try:
-            self.curs.execute(sql)
-        except sqlite.OperationalError, e:
+            self._cursor.execute(sql)
+        except StandardError, e:
             print "DB Error: could not access jobs database. Reason: '%s'" % e
-
-        self.dbcx.commit()
+        self._dbcx.commit()
 
     def _write_archjob_status_to_db(self, uid, attrdict):
-        self.curs.execute('SELECT parent_uid FROM archjobs WHERE jobid="%s"' % uid)
-        self.dbcx.commit()
-        if len(self.curs.fetchall()) == 0:
+        self._cursor.execute("SELECT parent_uid FROM archjobs WHERE jobid='%s'" % uid)
+        if len(self._dbcx.fetchall(self._cursor)) == 0:
             try:
-                self.curs.execute('INSERT INTO archjobs (jobid, parent_uid, starttime, '    \
-                    'endtime, arch, builder_addr, status, builder_status) '   \
-                    'VALUES ("%s", %d, %d, %d, "%s", "%s", "%s", "%s")' % (uid, attrdict['parent_uid'], \
+                self._cursor.execute("INSERT INTO archjobs (jobid, parent_uid, starttime, "    \
+                    "endtime, arch, builder_addr, status, builder_status) "   \
+                    "VALUES ('%s', %d, %d, %d, '%s', '%s', '%s', '%s')" % (uid, attrdict['parent_uid'], \
                     attrdict['starttime'], attrdict['endtime'], attrdict['arch'],                       \
                     attrdict['builder_addr'], attrdict['status'], attrdict['builder_status']))
-            except sqlite.OperationalError, e:
+            except StandardError, e:
                 print "DB Error: could not access jobs database. Reason: '%s'" % e
         else:            
             try:
-                self.curs.execute('UPDATE archjobs SET status="%s", builder_status="%s", endtime=%d ' \
-                    'WHERE jobid="%s" AND parent_uid=%d' % (attrdict['status'], 
+                self._cursor.execute("UPDATE archjobs SET status='%s', builder_status='%s', endtime=%d " \
+                    "WHERE jobid='%s' AND parent_uid=%d" % (attrdict['status'], 
                     attrdict['builder_status'], attrdict['endtime'], uid, attrdict['parent_uid']))
-            except sqlite.OperationalError, e:
+            except StandardError, e:
                 print "DB Error: could not access jobs database. Reason: '%s'" % e
-
-        self.dbcx.commit()
+        self._dbcx.commit()
 
     def _save_job_status(self):
         # Write new job status to the database
@@ -373,27 +291,28 @@
     def _start_new_jobs(self):
         self._new_queue_lock.acquire()
 
+        uid_field = self._dbcx.get_uid_field_for_insert()
+        uid_value = self._dbcx.get_uid_value_for_insert()
         for item in self._new_queue:
-            self.curs.execute('INSERT INTO jobs (uid, username, package,' \
-                    ' source, target_distro, target_target, target_repo, buildreq,' \
-                    ' starttime, endtime, status, result)' \
-                    ' VALUES (NULL, "%s", "%s", "%s", "%s", "%s", "%s", "%s", %d, 0, "%s", "")' \
-                    % (item['email'], item['package'], item['source'], item['target_distro'], \
+            self._cursor.execute("INSERT INTO jobs (%s username, package," \
+                    " source, target_distro, target_target, target_repo, buildreq," \
+                    " starttime, endtime, status, result)" \
+                    " VALUES (%s '%s', '%s', '%s', '%s', '%s', '%s', '%s', %d, 0, '%s', '')" \
+                    % (uid_field, uid_value, item['email'], item['package'], item['source'], item['target_distro'], \
                     item['target_target'], item['target_repo'], item['buildreq'], \
                     item['time'], 'initialize'))
-            self.dbcx.commit()
+            self._dbcx.commit()
 
             # Find the UID
-            self.curs.execute('SELECT uid FROM jobs WHERE username="%s" AND' \
-                    ' package="%s" AND source="%s" AND target_distro="%s" AND' \
-                    ' target_target="%s" AND target_repo = "%s" AND' \
-                    ' buildreq="%s" AND starttime=%d AND status="initialize"' \
+            self._cursor.execute("SELECT uid FROM jobs WHERE username='%s' AND" \
+                    " package='%s' AND source='%s' AND target_distro='%s' AND" \
+                    " target_target='%s' AND target_repo = '%s' AND" \
+                    " buildreq='%s' AND starttime=%d AND status='initialize'" \
                     % (item['email'], item['package'], item['source'], \
                     item['target_distro'], item['target_target'], item['target_repo'], \
                     item['buildreq'], item['time']))
-            self.dbcx.commit()
 
-            data = self.curs.fetchall()
+            data = self._dbcx.fetchall(self._cursor)
             # If two of the same job are submitted close together, we need
             # to make sure we pick the last result to get the correct one
             row = data[len(data) - 1]


Index: BuilderManager.py
===================================================================
RCS file: /cvs/fedora/extras-buildsys/server/BuilderManager.py,v
retrieving revision 1.15
retrieving revision 1.16
diff -u -r1.15 -r1.16
--- BuilderManager.py	31 Aug 2005 19:55:30 -0000	1.15
+++ BuilderManager.py	9 Sep 2005 15:10:17 -0000	1.16
@@ -153,7 +153,6 @@
 
             # Use the builder with the most free build slots
             builder_list.sort(key=self._builder_sort_func, reverse=True)
-            print builder_list
             for builder in builder_list:
                 try:
                     job = builder.start_job(parent, req['target_dict'], req['srpm_url'])


Index: Config.py
===================================================================
RCS file: /cvs/fedora/extras-buildsys/server/Config.py,v
retrieving revision 1.5
retrieving revision 1.6
diff -u -r1.5 -r1.6
--- Config.py	31 Aug 2005 14:13:03 -0000	1.5
+++ Config.py	9 Sep 2005 15:10:17 -0000	1.6
@@ -125,6 +125,20 @@
         self.set_option("UI", "guest_allowed", "yes")
         self.set_option("UI", "log_url", "http://www.foo.com/logs/")
 
+        self.add_section("Database")
+        self.set_option("Database", "engine", "sqlite")
+
+        self.add_section("sqlite Engine")
+        self.set_option("sqlite Engine", "database", "/etc/plague/server/jobdb")
+        self.set_option("sqlite Engine", "timeout", "3")
+
+        self.add_section("pgdb Engine")
+        self.set_option("pgdb Engine", "host", "localhost")
+        self.set_option("pgdb Engine", "database", "plague")
+        self.set_option("pgdb Engine", "user", "plague")
+        self.set_option("pgdb Engine", "password", "")
+
+
         self.save()
 
 


Index: Makefile
===================================================================
RCS file: /cvs/fedora/extras-buildsys/server/Makefile,v
retrieving revision 1.9
retrieving revision 1.10
diff -u -r1.9 -r1.10
--- Makefile	25 Aug 2005 18:15:14 -0000	1.9
+++ Makefile	9 Sep 2005 15:10:17 -0000	1.10
@@ -15,6 +15,7 @@
 	Builder.py \
 	BuildMaster.py \
 	Config.py \
+	DBManager.py \
 	EmailUtils.py \
 	PackageJob.py \
 	Repo.py \


Index: UserInterface.py
===================================================================
RCS file: /cvs/fedora/extras-buildsys/server/UserInterface.py,v
retrieving revision 1.54
retrieving revision 1.55
diff -u -r1.54 -r1.55
--- UserInterface.py	1 Sep 2005 20:36:12 -0000	1.54
+++ UserInterface.py	9 Sep 2005 15:10:17 -0000	1.55
@@ -15,7 +15,6 @@
 # Copyright 2005 Dan Williams <dcbw at redhat.com> and Red Hat, Inc.
 
 
-import sqlite
 import EmailUtils
 import time
 import sys
@@ -25,22 +24,14 @@
 import BuildMaster
 import PackageJob
 import Config
+import DBManager
 from plague import AuthedXMLRPCServer
 
 
-CONFIG_LOCATION = "/etc/plague/server/"
-
 # API version #, just increment each time an incompatible API change is made
 XMLRPC_API_VERSION = 100
 
 
-def get_dbcx():
-    dbcx = None
-    dbcx = sqlite.connect(CONFIG_LOCATION + "jobdb", encoding="utf-8", timeout=3)
-    curs = dbcx.cursor()
-    return (dbcx, curs)
-
-
 def validate_email(email):
     safe_list = ['@', '_', '-', '.', '+']
     for c in email:
@@ -101,9 +92,10 @@
     Base UserInterface class. NO AUTHENTICATION.  Subclass this to provide some.
     """
 
-    def __init__(self, builder_manager, build_master, cfg):
+    def __init__(self, builder_manager, build_master, db_manager, cfg):
         self._builder_manager = builder_manager
         self._bm = build_master
+        self._db_manager = db_manager
         self._cfg = cfg
 
     def email_result(self, to, source, resultstring):
@@ -185,25 +177,28 @@
         if not uid:
             return (-1, "Error: Invalid job UID.")
 
-        sql = 'SELECT uid, username, status, result FROM jobs WHERE uid=%d' % uid
+        sql = "SELECT uid, username, status, result FROM jobs WHERE uid=%d" % uid
 
         # Run the query for the job
         try:
-            dbcx, curs = get_dbcx()
-        except sqlite.DatabaseError, e:
-            return (-1, "Unable to access job database.")
+            dbcx = self._db_manager.dbcx()
+            curs = dbcx.cursor()
+        except StandardError, e:
+            return (-1, "Unable to access job database: '%s'" % e)
         curs.execute(sql)
-        job = curs.fetchone()
+        job = dbcx.fetchone(curs)
+        result = None
         if not job:
-            return (-1, "Error: Invalid job UID.")
-
-        # Ensure the job failed or was killed
-        if job['result'] != 'failed' and job['result'] != 'killed':
-            return (-1, "Error: Job %d must be either 'failed' or 'killed' to requeue." % uid)
+            result = (-1, "Error: Invalid job UID.")
+        elif job['result'] != 'failed' and job['result'] != 'killed':
+            result = (-1, "Error: Job %d must be either 'failed' or 'killed' to requeue." % uid)
+        else:
+            self._bm.requeue_job(uid)
+            result = (0, "Success: Job %d has been requeued." % uid)
 
-        self._bm.requeue_job(uid)
-        return (0, "Success: Job %d has been requeued." % uid)
-        
+        del curs
+        del dbcx
+        return result        
 
     def _kill_job(self, email, job, jobid):
         if not job:
@@ -216,13 +211,13 @@
     def list_jobs(self, args_dict):
         """ Query job information and return it to the user """
 
-        sql = 'SELECT uid, username, package, source, target_distro, target_target, ' \
-                    'target_repo, starttime, endtime, status, result FROM jobs WHERE '
+        sql = "SELECT uid, username, package, source, target_distro, target_target, " \
+                    "target_repo, starttime, endtime, status, result FROM jobs WHERE "
         sql_args = []
 
         if args_dict.has_key('email') and args_dict['email']:
             if validate_email(args_dict['email']):
-                sql_args.append('username LIKE "%%%s%%"' % args_dict['email'])
+                sql_args.append("username LIKE '%%%s%%'" % args_dict['email'])
             else:
                 return (-1, "Error: Invalid email address.", [])
 
@@ -235,16 +230,16 @@
                 if not PackageJob.is_package_job_stage_valid(status):
                     return (-1, "Error: Invalid job status.", [])
                 if len(status_sql) > 0:
-                    status_sql = status_sql + ' OR status="%s"' % status
+                    status_sql = status_sql + " OR status='%s'" % status
                 else:
-                    status_sql = 'status="%s"' % status
+                    status_sql = "status='%s'" % status
             status_sql = '(' + status_sql + ')'
             sql_args.append(status_sql)
 
         if args_dict.has_key('result') and args_dict['result']:
             result = args_dict['result']
             if PackageJob.is_package_job_result_valid(result):
-                sql_args.append('result="%s"' % result)
+                sql_args.append("result='%s'" % result)
             else:
                 return (-1, "Error: Invalid job result.", [])
 
@@ -271,9 +266,9 @@
             if not target_cfg:
                 return (-1, "Error: Invalid target.", [])
 
-            sql_args.append('target_distro="%s"' % target_cfg.distro())
-            sql_args.append('target_target="%s"' % target_cfg.target())
-            sql_args.append('target_repo="%s"' % target_cfg.repo())
+            sql_args.append("target_distro='%s'" % target_cfg.distro())
+            sql_args.append("target_target='%s'" % target_cfg.target())
+            sql_args.append("target_repo='%s'" % target_cfg.repo())
 
         if not len(sql_args):
             return (-1, "Error: Invalid query.", [])
@@ -320,11 +315,12 @@
 
         # Run the query for the job
         try:
-            dbcx, curs = get_dbcx()
-        except sqlite.DatabaseError, e:
-            return (-1, "Unable to access job database.", [])
+            dbcx = self._db_manager.dbcx()
+            curs = dbcx.cursor()
+        except StandardError, e:
+            return (-1, "Unable to access job database: '%s'" % e, [])
         curs.execute(sql)
-        data = curs.fetchall()
+        data = dbcx.fetchall(curs)
         jobs = []
         for row in data:
             jobrec = {}
@@ -355,7 +351,7 @@
             sql = "SELECT jobid, parent_uid, starttime, endtime, arch, builder_addr, "  \
                     "status, builder_status FROM archjobs WHERE " + uids
             curs.execute(sql)
-            data = curs.fetchall()
+            data = dbcx.fetchall(curs)
             for row in data:
                 ajrec = {}
                 ajrec['jobid'] = row['jobid']
@@ -372,7 +368,6 @@
 
         del curs
         del dbcx
-
         return (0, "Success.", jobs)
 
 
@@ -389,12 +384,15 @@
 
         # Run the query for the job
         try:
-            dbcx, curs = get_dbcx()
-        except sqlite.DatabaseError, e:
-            return (-1, "Unable to access job database.", {})
+            dbcx = self._db_manager.dbcx()
+            curs = dbcx.cursor()
+        except StandardError, e:
+            return (-1, "Unable to access job database: '%s'" % e, {})
         curs.execute(sql)
-        job = curs.fetchone()
+        job = dbcx.fetchone(curs)
         if not job:
+            del curs
+            del dbcx
             return (-1, "Error: Invalid job UID.", {})
         jobrec = {}
         jobrec['uid'] = job['uid']
@@ -426,7 +424,7 @@
         sql = "SELECT jobid, parent_uid, starttime, endtime, arch, builder_addr, "    \
                 "status, builder_status FROM archjobs WHERE parent_uid=%d " % uid
         curs.execute(sql)
-        data = curs.fetchall()
+        data = dbcx.fetchall(curs)
         for row in data:
             ajrec = {}
             ajrec['jobid'] = row['jobid']
@@ -439,10 +437,10 @@
             ajrec['builder_status'] = row['builder_status']
             jobrec['archjobs'].append(ajrec)
 
+        ret_job = copy.deepcopy(jobrec)
+
         del curs
         del dbcx
-
-        ret_job = copy.deepcopy(jobrec)
         return (0, "Success.", ret_job)
 
 
@@ -489,14 +487,18 @@
                 uids = uids + " OR uid=%d" % uid
 
         if len(uids):
-            sql = 'UPDATE jobs SET status="finished" WHERE %s' % uids
+            sql = "UPDATE jobs SET status='finished' WHERE %s" % uids
 
             try:
-                dbcx, curs = get_dbcx()
-            except sqlite.DatabaseError, e:
-                return (-1, "Unable to access job database.")
+                dbcx = self._db_manager.dbcx()
+                curs = dbcx.cursor()
+            except StandardError, e:
+                return (-1, "Unable to access job database: '%s'" % e)
             curs.execute(sql)
             dbcx.commit()
+            del curs
+            del dbcx
+
         return (0, "Success.")
 
     def srpm_upload_dir(self, target_alias):
@@ -536,20 +538,25 @@
 
         # Run the query for the job
         try:
-            dbcx, curs = get_dbcx()
-        except sqlite.DatabaseError, e:
-            return (-1, "Unable to access job database.")
+            dbcx = self._db_manager.dbcx()
+            curs = dbcx.cursor()
+        except StandardError, e:
+            return (-1, "Unable to access job database: '%s'" % e)
+
         curs.execute(sql)
-        job = curs.fetchone()
-        if not job:
-            return (-1, "Error: Invalid job UID.")
+        job = dbcx.fetchone(curs)
 
-        # Ensure matching usernames
-        if job['username'] != user.email and not user.job_admin:
-            return (-1, "Error: You are not the original submitter for Job %d." % uid)
-            
-        return UserInterface.requeue(self, uid)
+        result = None
+        if not job:
+            result = (-1, "Error: Invalid job UID.")
+        elif job['username'] != user.email and not user.job_admin:
+            result = (-1, "Error: You are not the original submitter for Job %d." % uid)
+        else:
+            result = UserInterface.requeue(self, uid)
 
+        del curs
+        del dbcx
+        return result
 
     def kill_job(self, email, jobid):
         user = AuthedXMLRPCServer.get_authinfo()
@@ -620,31 +627,44 @@
         sql = 'SELECT uid, username, status FROM jobs WHERE %s' % uids
 
         try:
-            dbcx, curs = get_dbcx()
-        except sqlite.DatabaseError, e:
-            return (-1, "Unable to access job database.")
+            dbcx = self._db_manager.dbcx()
+            curs = dbcx.cursor()
+        except StandardError, e:
+            return (-1, "Unable to access job database: '%s'" % e)
+
         curs.execute(sql)
-        data = curs.fetchall()
+        data = dbcx.fetchall(curs)
 
         # Ensure that the user can actually finish the jobs they requested
         final_uid_list = []
+        error = None
         for row in data:
             uid = row['uid']
             username = row['username']
             status = row['status']
 
             if status != 'needsign' and status != 'failed':
-                return (-1, "Error: Job %d must be either 'needsign' or 'failed' to finish it." % uid)
+                error = (-1, "Error: Job %d must be either 'needsign' or 'failed' to finish it." % uid)
+                break
 
             # Marking 'needsign' jobs as finished requires admin privs
             if status == 'needsign' and not user.job_admin:
-                return (-1, "Insufficient privileges to finish job %d." % uid)
+                error = (-1, "Insufficient privileges to finish job %d." % uid)
+                break
+
             # 'failed' jobs can only be finished by the job owner or a job_admin user
             if status == 'failed' and user != user.email and not user.job_admin:
-                return (-1, "Insufficient privileges to finish job %d." % uid)
+                error = (-1, "Insufficient privileges to finish job %d." % uid)
+                break
 
             final_uid_list.append(uid)
 
+        del curs
+        del dbcx
+
+        if error:
+            return error
+
         return UserInterface.finish(self, final_uid_list)
 
     def srpm_upload_dir(self, target_alias):


Index: main.py
===================================================================
RCS file: /cvs/fedora/extras-buildsys/server/main.py,v
retrieving revision 1.13
retrieving revision 1.14
diff -u -r1.13 -r1.14
--- main.py	25 Aug 2005 18:15:14 -0000	1.13
+++ main.py	9 Sep 2005 15:10:17 -0000	1.14
@@ -30,6 +30,7 @@
 import User
 import BuildMaster
 import BuilderManager
+import DBManager
 import Config
 from UserInterface import UserInterfaceSSLAuth
 from UserInterface import UserInterfaceNoAuth
@@ -95,11 +96,13 @@
         print "You need at least one target to do anything useful."
         sys.exit(3)
 
+    dbm = DBManager.DBManager(cfg)
+
     builder_manager = BuilderManager.BuilderManager(cfg)
 
     # Create the BuildMaster thread
     hostname = cfg.get_str("General", "hostname")
-    bm = BuildMaster.BuildMaster(builder_manager, cfg)
+    bm = BuildMaster.BuildMaster(builder_manager, dbm, cfg)
     bm.start()
 
     # Create the BuildMaster XMLRPC server
@@ -111,10 +114,10 @@
             ui_certs['key_and_cert'] = cfg.get_str("SSL", "server_key_and_cert")
             ui_certs['ca_cert'] = cfg.get_str("SSL", "ca_cert")
             ui_certs['peer_ca_cert'] = cfg.get_str("UI", "client_ca_cert")
-            ui = UserInterfaceSSLAuth(builder_manager, bm, cfg)
+            ui = UserInterfaceSSLAuth(builder_manager, bm, dbm, cfg)
             bm_server = AuthenticatedSSLXMLRPCServer((hostname, port), ui_certs, cfg)
         else:
-            ui = UserInterfaceNoAuth(builder_manager, bm, cfg)
+            ui = UserInterfaceNoAuth(builder_manager, bm, dbm, cfg)
             bm_server = AuthedXMLRPCServer.AuthedXMLRPCServer((hostname, port))
     except socket.error, e:
         if e[0] == 98:      # Address already in use




More information about the fedora-extras-commits mailing list