extras-buildsys/server ArchJob.py, 1.12, 1.13 BuildMaster.py, 1.32, 1.33 Builder.py, 1.12, 1.13 BuilderManager.py, 1.12, 1.13 Config.py, 1.1, 1.2 Makefile, 1.8, 1.9 PackageJob.py, 1.23, 1.24 Repo.py, 1.13, 1.14 UserInterface.py, 1.48, 1.49 main.py, 1.12, 1.13
Daniel Williams (dcbw)
fedora-extras-commits at redhat.com
Thu Aug 25 18:15:16 UTC 2005
Author: dcbw
Update of /cvs/fedora/extras-buildsys/server
In directory cvs-int.fedora.redhat.com:/tmp/cvs-serv3976/server
Modified Files:
ArchJob.py BuildMaster.py Builder.py BuilderManager.py
Config.py Makefile PackageJob.py Repo.py UserInterface.py
main.py
Log Message:
2005-08-25 Dan Williams <dcbw at redhat.com>
* Initial commit of reworked stuff:
- Each target gets separate config files on builder
and server
- Builders now run multiple jobs per builder instance
- Config files now ConfigParser based
- Target specifications are richer and require distro,
target, and repo names
- Builder's supported arches are autodetermined
- Various database fields renamed and/or removed
IT DOESN'T WORK YET
Index: ArchJob.py
===================================================================
RCS file: /cvs/fedora/extras-buildsys/server/ArchJob.py,v
retrieving revision 1.12
retrieving revision 1.13
diff -u -r1.12 -r1.13
--- ArchJob.py 15 Aug 2005 16:00:08 -0000 1.12
+++ ArchJob.py 25 Aug 2005 18:15:14 -0000 1.13
@@ -29,7 +29,7 @@
class ArchJob:
""" Tracks a single build instance for a single arch on a builder """
- def __init__(self, builder, cfg, server, par_job, jobid, target, arch):
+ def __init__(self, builder, cfg, server, par_job, jobid, target_dict):
self.par_job = par_job
self.builder = builder
self._server = server
@@ -38,8 +38,7 @@
self.status = 'running'
self.builder_status = ''
self._failure_noticed = False
- self.target = target
- self.arch = arch
+ self._target_dict = target_dict
self._builder_gone = False
self.downloads = {}
self.starttime = time.time()
@@ -88,14 +87,13 @@
host_port, path = urllib.splithost(addr)
host, port = urllib.splitport(host_port)
attrdict['builder_addr'] = host
- attrdict['builder_port'] = port
attrdict['status'] = self.status
attrdict['builder_status'] = self.builder_status
attrdict['starttime'] = self.starttime
attrdict['endtime'] = self.endtime
return attrdict
- def set_builder_status(self, status):
+ def set_status(self, status):
if status != 'idle':
oldstatus = self.builder_status
self.builder_status = status
Index: BuildMaster.py
===================================================================
RCS file: /cvs/fedora/extras-buildsys/server/BuildMaster.py,v
retrieving revision 1.32
retrieving revision 1.33
diff -u -r1.32 -r1.33
--- BuildMaster.py 15 Aug 2005 19:42:26 -0000 1.32
+++ BuildMaster.py 25 Aug 2005 18:15:14 -0000 1.33
@@ -44,8 +44,10 @@
'uid INTEGER PRIMARY KEY, ' \
'username VARCHAR(20), ' \
'package VARCHAR(50), ' \
- 'cvs_tag VARCHAR(255), ' \
- 'target VARCHAR(20), ' \
+ 'source VARCHAR(255), ' \
+ 'target_distro VARCHAR(20), ' \
+ 'target_target VARCHAR(20), ' \
+ 'target_repo VARCHAR(20), ' \
'buildreq VARCHAR(75), ' \
'starttime BIGINT, ' \
'endtime BIGINT, ' \
@@ -82,7 +84,6 @@
'endtime BIGINT, ' \
'arch VARCHAR(15), ' \
'builder_addr VARCHAR(100), ' \
- 'builder_port VARCHAR(6), ' \
'status VARCHAR(15), ' \
'builder_status VARCHAR(15)' \
')')
@@ -113,7 +114,8 @@
repodir = self._cfg.get_str("Directories", "repo_dir")
for target_cfg in self._cfg.targets():
repo = Repo.Repo(target_cfg, repodir, builder_manager)
- self._repos[repo.target()] = repo
+ target_str = target_cfg.target_string()
+ self._repos[target_str] = repo
repo.start()
self._done_queue = []
@@ -195,16 +197,18 @@
self.dbcx.commit()
# Now requeue the job
+ target_str = Config.make_target_string(row['target_distro'],
+ row['target_target'], row['target_repo'])
try:
- repo = self._repos[row['target']]
+ repo = self._repos[target_str]
except KeyError:
print "%s (%s): Target '%s' not found." % (uid,
- row['package'], row['target'])
+ row['package'], target_str)
else:
job = PackageJob.PackageJob(uid, row['username'], row['package'],
- row['cvs_tag'], repo, self)
+ row['source'], repo, self)
print "%s (%s): Restarting '%s' on target '%s'" % (uid,
- row['package'], row['cvs_tag'], row['target'])
+ row['package'], row['source'], target_str)
self._building_jobs_lock.acquire()
self._building_jobs[uid] = job
self._building_jobs_lock.release()
@@ -214,11 +218,13 @@
for repo in self._repos.values():
repo.stop()
- def create_job_request(self, email, package, source, target, buildreq, time):
+ def create_job_request(self, email, package, source, target_dict, buildreq, time):
req = {}
req['email'] = email
req['package'] = package
- req['target'] = target
+ req['target_distro'] = target_dict['distro']
+ req['target_target'] = target_dict['target']
+ req['target_repo'] = target_dict['repo']
req['buildreq'] = buildreq
req['time'] = time
req['source'] = source
@@ -332,11 +338,10 @@
if len(self.curs.fetchall()) == 0:
try:
self.curs.execute('INSERT INTO archjobs (jobid, parent_uid, starttime, ' \
- 'endtime, arch, builder_addr, builder_port, status, builder_status) ' \
- 'VALUES ("%s", %d, %d, %d, "%s", "%s", "%s", "%s", "%s")' % (uid, attrdict['parent_uid'], \
+ 'endtime, arch, builder_addr, status, builder_status) ' \
+ 'VALUES ("%s", %d, %d, %d, "%s", "%s", "%s", "%s")' % (uid, attrdict['parent_uid'], \
attrdict['starttime'], attrdict['endtime'], attrdict['arch'], \
- attrdict['builder_addr'], attrdict['builder_port'], attrdict['status'], \
- attrdict['builder_status']))
+ attrdict['builder_addr'], attrdict['status'], attrdict['builder_status']))
except sqlite.OperationalError, e:
print "DB Error: could not access jobs database. Reason: '%s'" % e
else:
@@ -371,17 +376,21 @@
for item in self._new_queue:
self.curs.execute('INSERT INTO jobs (uid, username, package,' \
- ' cvs_tag, target, buildreq, starttime, endtime, status, result)' \
+ ' source, target_distro, target_target, target_repo, buildreq,' \
+ ' starttime, endtime, status, result)' \
' VALUES (NULL, "%s", "%s", "%s", "%s", "%s", %d, 0, "%s", "")' \
- % (item['email'], item['package'], item['source'], item['target'], \
- item['buildreq'], item['time'], 'initialize'))
+ % (item['email'], item['package'], item['source'], item['target_distro'], \
+ item['target_target'], item['target_repo'], item['buildreq'], \
+ item['time'], 'initialize'))
self.dbcx.commit()
# Find the UID
self.curs.execute('SELECT uid FROM jobs WHERE username="%s" AND' \
- ' package="%s" AND cvs_tag="%s" AND target="%s" AND' \
+ ' package="%s" AND source="%s" AND target_distro="%s" AND' \
+ ' target_target="%s" AND target_repo = "%s" AND' \
' buildreq="%s" AND starttime=%d AND status="initialize"' \
- % (item['email'], item['package'], item['source'], item['target'], \
+ % (item['email'], item['package'], item['source'], \
+ item['target_distro'], item['target_target'], item['target_repo'], \
item['buildreq'], item['time']))
self.dbcx.commit()
@@ -389,12 +398,14 @@
# If two of the same job are submitted close together, we need
# to make sure we pick the last result to get the correct one
row = data[len(data) - 1]
- repo = self._repos[item['target']]
+ target_str = Config.make_target_string(item['target_distro'],
+ item['target_target'], item['target_repo'])
+ repo = self._repos[target_str]
job = PackageJob.PackageJob(row['uid'], item['email'], item['package'],
item['source'], repo, self)
print "%s (%s): Starting tag '%s' on target '%s'" % (row['uid'], \
- item['package'], item['source'], item['target'])
+ item['package'], item['source'], target_str)
item['uid'] = row['uid']
item['uid_avail'] = True
Index: Builder.py
===================================================================
RCS file: /cvs/fedora/extras-buildsys/server/Builder.py,v
retrieving revision 1.12
retrieving revision 1.13
diff -u -r1.12 -r1.13
--- Builder.py 15 Aug 2005 03:18:20 -0000 1.12
+++ Builder.py 25 Aug 2005 18:15:14 -0000 1.13
@@ -39,12 +39,13 @@
self._cur_jobid = None
self._manager = manager
self._jobs = {}
+ self._free_slots = 0
self._address = address
self._alive = True
self._stop = False
self._prepping_jobs = False
self._unavail_count = 0
- self._target_arches = {}
+ self._target_list = []
self._ping_timeout = 0
self._cur_ping_interval = self._BUILDER_PING_INTERVAL
self._when_died = 0
@@ -61,41 +62,58 @@
self._server = XMLRPCServerProxy.PlgXMLRPCServerProxy(self._address, None)
self._server_lock = threading.Lock()
- (self._alive, arches) = self._ping_builder()
+ (self._alive, target_list) = self._ping_builder()
if self._alive:
- self._init_builder(arches)
+ self._init_builder(target_list)
threading.Thread.__init__(self)
- def _init_builder(self, arches):
- self._target_arches = arches
- for target in self._target_arches.keys():
- self._target_arches[target].append('noarch')
+ def _init_builder(self, target_list):
+ self._target_list = target_list
# Kill any jobs currently running on the builder
- (jobid, status) = self._get_cur_job_and_status()
- if jobid and jobid != 0:
+ jobs = self._building_jobs()
+ for jobid in jobs.keys():
try:
self._server.die(jobid)
except:
pass
+ def _building_jobs(self):
+ jobs = {}
+ try:
+ jobs = self._server.building_jobs()
+ self._unavail_count = 0
+ except (socket.error, socket.timeout, OpenSSL.SSL.SysCallError, xmlrpclib.ProtocolError):
+ self._unavail_count = self._unavail_count + 1
+ return jobs
+
def _ping_builder(self):
- target_arches = {}
- alive = True
+ target_list = []
try:
- target_arches = self._server.supported_target_arches()
- except socket.error:
+ target_list = self._server.supported_targets()
+ alive = True
+ except (socket.error, socket.timeout, OpenSSL.SSL.SysCallError, xmlrpclib.ProtocolError):
alive = False
- return (alive, target_arches)
+ return (alive, target_list)
- def arches(self, target):
- arches = None
+ def _free_slots(self):
try:
- arches = self._target_arches[target]
- except:
- pass
- return arches
+ free_slots = self._server.free_slots()
+ except (socket.error, socket.timeout, OpenSSL.SSL.SysCallError, xmlrpclib.ProtocolError):
+ self._unavail_count = self._unavail_count + 1
+ free_slots = 0
+ return free_slots
+
+ def arches(self, target_dict):
+ for td in self._target_list:
+ if td['distro'] == target_dict['distro'] and td['target'] == target_dict['target'] and td['repo'] == target_dict['repo']:
+ arches = []
+ for arch in td['supported_arches']:
+ if not arch in arches:
+ arches.append(arch)
+ return arches
+ return None
def can_build_arch_on_target(self, arch, target):
if self._target_arches.has_key(target) and len(self._target_arches[target]) > 0:
@@ -109,21 +127,22 @@
def alive(self):
""" Is the builder responding to requests? """
return self._alive
+
+ def free_slots(self):
+ return self._free_slots
- def start_job(self, par_job, target, arch, srpm_url):
- self._server_lock.acquire()
+ def start_job(self, par_job, target_dict, srpm_url):
if not self.available():
- self._server_lock.release()
raise RuntimeError
if not self._target_arches.has_key(target) or len(self._target_arches[target]) == 0:
- self._server_lock.release()
raise RuntimeError
if not arch in self._target_arches[target]:
- self._server_lock.release()
raise RuntimeError
+ self._server_lock.acquire()
try:
- jobid = self._server.start(target, arch, srpm_url)
+ # Builder will return jobid of 0 if it can't start the job for some reason
+ jobid = self._server.start_new_job(target_dict, srpm_url)
except (socket.error, socket.timeout, OpenSSL.SSL.SysCallError, xmlrpclib.ProtocolError):
jobid = 0
@@ -131,43 +150,28 @@
self._server_lock.release()
raise RuntimeError
- use_ssl = self._server_cfg.get_bool("Builders", "use_ssl")
- job = ArchJob.ArchJob(self, self._server_cfg, self._server, par_job, jobid, target, arch)
+ job = ArchJob.ArchJob(self, self._server_cfg, self._server, par_job, jobid, target_dict)
self._jobs[jobid] = job
- self._update_cur_job()
+ self._update_building_jobs()
self._server_lock.release()
-
return job
- def _get_cur_job_and_status(self):
- jobid = None
- status = None
-
- try:
- (jobid, status) = self._server.get_cur_job()
- except (socket.error, socket.timeout, OpenSSL.SSL.SysCallError, xmlrpclib.ProtocolError):
- self._unavail_count = self._unavail_count + 1
- else:
- self._unavail_count = 0
-
- return (jobid, status)
-
- def _update_cur_job(self):
- (jobid, status) = self._get_cur_job_and_status()
+ def _update_building_jobs(self):
+ jobs = self._building_jobs()
+ self._free_slots = self.free_slots()
# Update the current job's status
if self._unavail_count == 0:
- try:
- job = self._jobs[jobid]
- job.set_builder_status(status)
- except KeyError:
- pass
- self._cur_jobid = jobid
-
- if status == 'prepping':
- self._prepping_jobs = True
- else:
- self._prepping_jobs = False
+ self._prepping_jobs = False
+ for jobid in jobs.keys():
+ try:
+ job = self._jobs[jobid]
+ status = jobs[jobid]
+ job.set_status(status)
+ if status == 'prepping':
+ self._prepping_jobs = True
+ except KeyError:
+ pass
def stop(self):
self._stop = True
@@ -198,11 +202,11 @@
for addr in self._server_cfg.get_list("Email", "admin_emails"):
EmailUtils.email_result(sender, addr, msg, subject)
- def _handle_builder_reactivate(self, target_arches):
+ def _handle_builder_reactivate(self, target_list):
self._alive = True
self._ping_timeout = 0
- self._init_builder(target_arches)
+ self._init_builder(target_list)
print "Re-activating builder '%s'." % self._address
subject = "Builder Re-activated: %s" % self._address
@@ -221,7 +225,7 @@
self._server_lock.acquire()
if self._alive:
- self._update_cur_job()
+ self._update_building_jobs()
if self._unavail_count > 2:
# Kill all jobs on the client if it went away
@@ -233,9 +237,9 @@
else:
# Ping the builder every so often to see if it responds again
if time.time() > (self._ping_timeout + self._cur_ping_interval):
- (alive, target_arches) = self._ping_builder()
+ (alive, target_list) = self._ping_builder()
if alive:
- self._handle_builder_reactivate(target_arches)
+ self._handle_builder_reactivate(target_list)
else:
# Wait and ping again
self._ping_timeout = time.time()
@@ -251,9 +255,7 @@
"""
Can the builder start a new job right now?
"""
- if self._unavail_count > 2 or not self._alive:
- return False
- if self._cur_jobid:
+ if self._unavail_count > 2 or not self._alive or self.free_slots() <= 0:
return False
return True
@@ -272,14 +274,14 @@
host_port, path = urllib.splithost(addr)
host, port = urllib.splitport(host_port)
builder_dict['address'] = host
- builder_dict['port'] = port
arches = []
- for target in self._target_arches.keys():
- for arch in self._target_arches[target]:
+ for td in self._target_list:
+ for arch in td['supported_arches']:
if not arch in arches:
arches.append(arch)
builder_dict['arches'] = arches
+
if self._alive:
if self._cur_jobid:
builder_dict['status'] = 'building'
Index: BuilderManager.py
===================================================================
RCS file: /cvs/fedora/extras-buildsys/server/BuilderManager.py,v
retrieving revision 1.12
retrieving revision 1.13
diff -u -r1.12 -r1.13
--- BuilderManager.py 15 Aug 2005 03:18:20 -0000 1.12
+++ BuilderManager.py 25 Aug 2005 18:15:14 -0000 1.13
@@ -138,10 +138,11 @@
self._queue.remove(req)
continue
# Find a free builder for this request
+ target_dict = req['target_dict']
for builder in self._builders:
- if builder.available() and builder.can_build_arch_on_target(req['arch'], req['target']):
+ if builder.available() and builder.can_build_arch_on_target(target_dict):
try:
- job = builder.start_job(parent, req['target'], req['arch'], req['srpm_url'])
+ job = builder.start_job(parent, target_dict, req['srpm_url'])
except RuntimeError:
pass
else:
@@ -161,10 +162,10 @@
if len(self._queue) > 0:
time.sleep(0.25)
- def request_arch_job(self, par_job, target, arch, srpm_url, orphaned):
+ def request_arch_job(self, par_job, target_dict, srpm_url, orphaned):
req = {}
req['parent'] = par_job
- req['target'] = target
+ req['target_dict'] = target_dict
req['arch'] = arch
req['srpm_url'] = srpm_url
Index: Config.py
===================================================================
RCS file: /cvs/fedora/extras-buildsys/server/Config.py,v
retrieving revision 1.1
retrieving revision 1.2
diff -u -r1.1 -r1.2
--- Config.py 15 Aug 2005 03:18:20 -0000 1.1
+++ Config.py 25 Aug 2005 18:15:14 -0000 1.2
@@ -16,86 +16,19 @@
import os
from ConfigParser import ConfigParser
+from plague import BaseConfig
-class ConfigError(Exception):
- pass
+def make_target_string(distro, target, repo):
+ return "%s-%s-%s" % (distro, target, repo)
-class BaseConfig:
- def __init__(self, filename):
- self._filename = filename
- self._config = ConfigParser()
-
- def open(self, filename=None):
- if not filename:
- filename = self._filename
- if not os.path.exists(filename):
- raise ConfigError("Config file '%s' missing." % filename)
- self._config.read(filename)
- self._filename = filename
-
- def has_option(self, section, name):
- return self._config.has_option(section, name)
-
- def get_option(self, section, name):
- if not self._config.has_section(section):
- raise ConfigError("Invalid section: %s" % section)
- if self._config.has_option(section, name):
- return self._config.get(section, name)
- raise ConfigError("Config file %s does not have option: %s/%s" % (self._filename, section, name))
-
- def get_str(self, section, name):
- return self.get_option(section, name)
-
- def get_bool(self, section, name):
- opt = self.get_option(section, name)
- if type(opt) == type(""):
- if opt.lower() == 'yes' or opt.lower() == 'true':
- return True
- elif opt.lower() == 'no' or opt.lower() == 'false':
- return False
- raise ConfigError("Invalid format for %s/%s. Should be one of [yes, no, true, false]." % (section, name))
-
- def get_list(self, section, name):
- opt = self.get_option(section, name)
- if type(opt) == type(""):
- if not len(opt):
- return []
- try:
- return opt.split()
- except Exception:
- pass
- raise ConfigError("Invalid format for %s/%s. Should be a space-separate list." % (section, name))
-
- def get_int(self, section, name):
- opt = self.get_option(section, name)
- try:
- return int(opt)
- except Exception:
- pass
- raise ConfigError("Invalid format for %s/%s. Should be a valid integer." % (section, name))
-
- def add_section(self, section):
- self._config.add_section(section)
-
- def set_option(self, section, name, value):
- if not self._config.has_section(section):
- self._config.add_section(section)
- self._config.set(section, name, value)
-
- def save(self, filename=None):
- if not filename:
- filename = self._filename
- fp = open(filename, 'w')
- self._config.write(fp)
- self._filename = filename
-class ServerConfig(BaseConfig):
+class ServerConfig(BaseConfig.BaseConfig):
def __init__(self, filename):
- BaseConfig.__init__(self, filename)
+ BaseConfig.BaseConfig.__init__(self, filename)
try:
self.open()
- except ConfigError:
+ except BaseConfig.ConfigError:
print "Config file did not exist. Writing %s with default values." % filename
self.save_default_config()
self._targets = []
@@ -154,19 +87,46 @@
self.save()
-class TargetConfig(BaseConfig):
+class TargetConfig(BaseConfig.BaseConfig):
def __init__(self, cfg, filename):
- BaseConfig.__init__(self, filename)
+ BaseConfig.BaseConfig.__init__(self, filename)
try:
self.open()
- except ConfigError:
+ except BaseConfig.ConfigError:
print "Config file did not exist. Writing %s with default values." % filename
self.save_default_config()
self._parent_cfg = cfg
+ self._distro = self.get_str("General", "distro")
+ self._target = self.get_str("General", "target")
+ self._base_arches = self.get_str("Arches", "base_arches")
+ self._repo = self.get_str("General", "repo")
def parent_cfg(self):
return self._parent_cfg
+ def target_dict(self):
+ target_dict = {}
+ target_dict['distro'] = self._distro
+ target_dict['target'] = self._target
+ target_dict['arch'] = None # meaningless for a server-side target
+ target_dict['repo'] = self._repo
+ return target_dict
+
+ def target_string(self):
+ return make_target_string(self._distro, self._target, self._repo)
+
+ def distro(self):
+ return self._distro
+
+ def target(self):
+ return self._target
+
+ def basearches(self):
+ return self._base_arches
+
+ def repo(self):
+ return self._repo
+
def addl_pkg_arches(self):
if not self._config.has_section("Additional Package Arches"):
return []
@@ -184,7 +144,9 @@
def save_default_config(self, filename=None):
self.add_section("General")
- self.set_option("General", "name", "development")
+ self.set_option("General", "distro", "fedora")
+ self.set_option("General", "target", "development")
+ self.set_option("General", "repo", "core")
self.set_option("General", "scratch", "no")
self.set_option("General", "repo_script", "")
@@ -193,8 +155,8 @@
self.set_option("Arches", "optional_arches", "")
self.add_section("Aliases")
+ self.set_option("Aliases", "cvs_alias", "devel")
self.set_option("Aliases", "user_aliases", "devel")
- self.set_option("Aliases", "cvs_aliases", "devel")
self.add_section("Additional Package Arches")
self.set_option("Additional Package Arches", "kernel", "i686")
Index: Makefile
===================================================================
RCS file: /cvs/fedora/extras-buildsys/server/Makefile,v
retrieving revision 1.8
retrieving revision 1.9
diff -u -r1.8 -r1.9
--- Makefile 15 Aug 2005 12:05:01 -0000 1.8
+++ Makefile 25 Aug 2005 18:15:14 -0000 1.9
@@ -14,7 +14,7 @@
BuilderManager.py \
Builder.py \
BuildMaster.py \
- Config.py \
+ Config.py \
EmailUtils.py \
PackageJob.py \
Repo.py \
Index: PackageJob.py
===================================================================
RCS file: /cvs/fedora/extras-buildsys/server/PackageJob.py,v
retrieving revision 1.23
retrieving revision 1.24
diff -u -r1.23 -r1.24
--- PackageJob.py 15 Aug 2005 16:00:08 -0000 1.23
+++ PackageJob.py 25 Aug 2005 18:15:14 -0000 1.24
@@ -98,14 +98,18 @@
def make_job_log_url(base_url, target, uid, name, ver, release):
if target and uid and name and ver and release:
- return "%s/%s/%s-%s-%s-%s/" % (base_url, target, uid, name, ver, release)
+ if base_url.endswith('/'):
+ slash=''
+ else:
+ slash='/'
+ return "%s%s%s/%s-%s-%s-%s/" % (base_url, slash, target, uid, name, ver, release)
return None
class PackageJob:
""" Controller object for building 1 SRPM on multiple arches """
- def __init__(self, uid, username, package, cvs_tag, repo, buildmaster):
+ def __init__(self, uid, username, package, source, repo, buildmaster):
self.curstage = ''
self.result = 'in-progress'
self.bm = buildmaster
@@ -119,13 +123,15 @@
self._target_cfg = repo.target_cfg()
self._server_cfg = self._target_cfg.parent_cfg()
+ self.repo = repo
+ self._target_str = self._target_cfg.target_string()
+ self._target_dict = self._target_cfg.target_dict()
+
self.username = username
self.starttime = time.time()
self.endtime = 0
- self.target = repo.target()
- self.repo = repo
self.use_cvs = self._server_cfg.get_bool("CVS", "use_cvs")
- self.cvs_tag = cvs_tag
+ self._source = source
self.result_dir = None
self.srpm_path = None
self.srpm_http_path = None
@@ -259,7 +265,7 @@
err_msg = None
# Create the temporary checkout directory
- dirname = "%s-%s-%d" % (self.uid, self.cvs_tag, time.time())
+ dirname = "%s-%s-%d" % (self.uid, self._source, time.time())
tmpdir = self._server_cfg.get_str("Directories", "tmpdir")
self.checkout_tmpdir = os.path.join(tmpdir, dirname)
if os.path.exists(self.checkout_tmpdir):
@@ -274,11 +280,12 @@
# Checkout the module
cmd = 'cd %s; %s co -r %s %s %s' % (self.checkout_tmpdir, CVS_CMD,
- self.cvs_tag, self.package, env_args)
+ self._source, self.package, env_args)
debugprint("%d: Running %s" % (self.uid, cmd))
s, o = commands.getstatusoutput(cmd)
if s != 0:
- err_msg = "Erro: could not check out %s from %s - output was:\n\n%s" % (self.cvs_tag, self.target, o)
+ err_msg = "Error: could not check out %s from %s - output was:\n\n" \
+ "%s" % (self._source, self._target_str, o)
else:
# Just in case the 'common' directory didn't come along for the ride,
# get it from CVS
@@ -288,7 +295,8 @@
debugprint("%d: Running %s" % (self.uid, cmd))
s, o = commands.getstatusoutput(cmd)
if s != 0:
- err_msg = "Error: could not check out common directory - output was:\n\n%s" % (self.cvs_tag, self.target, o)
+ err_msg = "Error: could not check out common directory for %s on %s - " \
+ "output was:\n\n%s" % (self._source, self._target_str, o)
self.bm.notify_checkout_done(self)
@@ -299,17 +307,17 @@
return False
def _stage_make_srpm(self):
- # Map our target (self.target) to the CVS target alias, since CVS may have
+ # Map our target to the CVS target alias, since CVS may have
# different target names than we expose
- cvs_target = self.target
- cvs_aliases = self._target_cfg.get_str("Aliases", "cvs_aliases")
- if len(cvs_aliases) > 0:
- cvs_target = cvs_aliases
+ cvs_target = self._target_dict['target']
+ cvs_alias = self._target_cfg.get_str("Aliases", "cvs_alias")
+ if len(cvs_alias) > 0:
+ cvs_target = cvs_alias
self.srpm_path = None
srpm_dir = os.path.join(self.checkout_tmpdir, self.package, cvs_target)
if not os.path.exists(srpm_dir):
- msg = "could not find path %s for %s." % (srpm_dir, self.cvs_tag)
+ msg = "Error: could not find path %s for %s." % (srpm_dir, self._source)
raise PrepError(msg)
cmd = 'cd %s; %s srpm' % (srpm_dir, MAKE_CMD)
@@ -323,7 +331,7 @@
if line.find('..........') == -1 and len(line) > 0:
output_lines.append(line)
o = string.join(output_lines, '\n')
- msg = "Error: could not make srpm for %s - output was:\n\n%s" % (self.cvs_tag, o)
+ msg = "Error: could not make srpm for %s - output was:\n\n%s" % (self._source, o)
raise PrepError(msg)
srpmpath = None
@@ -334,7 +342,7 @@
srpmpath = path.strip()
break
if not srpmpath:
- msg = "Error: could not find srpm for %s - output was:\n\n%s" % (self.cvs_tag, o)
+ msg = "Error: could not find srpm for %s - output was:\n\n%s" % (self._source, o)
raise PrepError(msg)
self.srpm_path = srpmpath
@@ -346,7 +354,7 @@
# The dir will look like this:
# <rootdir>/devel/95-foo-1.1.0-23
pkgsubdir = '%d-%s-%s-%s' % (self.uid, self.name, self.ver, self.release)
- stage_dir = os.path.join(rootdir, self.target, pkgsubdir)
+ stage_dir = os.path.join(rootdir, self._target_str, pkgsubdir)
if os.path.exists(stage_dir):
shutil.rmtree(stage_dir, ignore_errors=True)
os.makedirs(stage_dir)
@@ -356,7 +364,7 @@
# In SRPM-only mode, cvs_tag is path to the SRPM to build
if self.use_cvs == False:
- self.srpm_path = self.cvs_tag
+ self.srpm_path = self._source
ts = rpmUtils.transaction.initReadOnlyTransaction()
hdr = rpmUtils.miscutils.hdrFromPackage(ts, self.srpm_path)
@@ -374,7 +382,7 @@
msg = """Package %s does not build on any architectures this build system supports.
Package: %s
Build System: %s
-""" % (self.cvs_tag, pkg_arches, allowed_arches)
+""" % (self._source, pkg_arches, allowed_arches)
raise PrepError(msg)
work_dir = self._server_cfg.get_str("Directories", "server_work_dir")
@@ -408,7 +416,12 @@
method = "http://"
hostname = self._server_cfg.get_str("General", "hostname")
srpm_url = method + hostname + ":8886/" + srpm_http_base
- self.bm.builder_manager.request_arch_job(self, self.target, arch, srpm_url, orphaned)
+ target_dict = {}
+ target_dict['distro'] = self._target_cfg.get_str("General", "distro")
+ target_dict['target'] = self._target_cfg.get_str("General", "target")
+ target_dict['arch'] = arch
+ target_dict['repo'] = self._target_cfg.get_str("General", "repo")
+ self.bm.builder_manager.request_arch_job(self, target_dict, srpm_url, orphaned)
def _request_arch_jobs(self):
# Queue requests for build jobs
@@ -454,7 +467,7 @@
self.wake()
def _handle_death(self):
- resultstring = "%s (%s): Build on target %s was killed by %s." % (self.uid, self.name, self.target, self._killer)
+ resultstring = "%s (%s): Build on target %s was killed by %s." % (self.uid, self.name, self._target_str, self._killer)
self.result = 'killed'
self._set_cur_stage('finished', resultstring)
self.email_result(self.username, resultstring)
@@ -498,13 +511,13 @@
except PrepError, e:
if self.use_cvs == True:
shutil.rmtree(self.checkout_tmpdir, ignore_errors=True)
- subj = 'Prep Error (Job %s): %s on %s' % (self.uid, self.cvs_tag, self.target)
+ subj = 'Prep Error (Job %s): %s on %s' % (self.uid, self._source, self._target_str)
self.email_result(self.username, resultstring=e.args, subject=subj)
self._stage_failed(e.args)
except BuildError, e:
- subj = 'Build Error (Job %s): %s on %s' % (self.uid, self.cvs_tag, self.target)
+ subj = 'Build Error (Job %s): %s on %s' % (self.uid, self._source, self._target_str)
base_url = self._server_cfg.get_str("UI", "log_url")
- log_url = make_job_log_url(base_url, self.target, self.uid, self.name, self.ver, self.release)
+ log_url = make_job_log_url(base_url, self._target_str, self.uid, self.name, self.ver, self.release)
msg = "%s\n\n Build logs may be found at %s\n\n" % (e.msg, log_url)
logtail = self._get_log_tail(e.arch)
msg = "%s\n-------------------------------------------------\n\n%s\n" % (msg, logtail)
@@ -567,6 +580,9 @@
self.bm.notify_job_done(self)
def _cleanup_job_files(self):
+ if not self.result_dir or not self.srpm_http_path:
+ return
+
srpm_file = os.path.join(self.result_dir, os.path.basename(self.srpm_http_path))
# Delete any RPMs in the arch dirs
@@ -604,9 +620,9 @@
src_file = os.path.join(self.result_dir, job.arch, f)
verrel = "%s-%s" % (self.ver, self.release)
if f.endswith(".src.rpm"):
- dst_path = os.path.join(repo_dir, self.target, self.name, verrel, "SRPM")
+ dst_path = os.path.join(repo_dir, self._target_str, self.name, verrel, "SRPM")
else:
- dst_path = os.path.join(repo_dir, self.target, self.name, verrel, job.arch)
+ dst_path = os.path.join(repo_dir, self._target_str, self.name, verrel, job.arch)
self.repofiles[src_file] = os.path.join(dst_path, f)
self._event.clear()
@@ -625,14 +641,14 @@
self.wake()
def _stage_repodone(self):
- resultstring = " %s (%s): Build on target %s succeeded." % (self.uid, self.name, self.target)
+ resultstring = " %s (%s): Build on target %s succeeded." % (self.uid, self.name, self._target_str)
self.result = 'success'
self._set_cur_stage('needsign', resultstring)
self._cleanup_job_files()
base_url = self._server_cfg.get_str("UI", "log_url")
- log_url = make_job_log_url(base_url, self.target, self.uid, self.name, self.ver, self.release)
+ log_url = make_job_log_url(base_url, self._target_str, self.uid, self.name, self.ver, self.release)
resultstring = resultstring + "\n Build logs may be found at %s\n" % (log_url)
self.email_result(self.username, resultstring)
@@ -647,7 +663,7 @@
pkg_dir = "%s-%s-%s-%s" % (self.uid, self.name, self.ver, self.release)
work_dir = self._server_cfg.get_str("Directories", "server_work_dir")
- log_dir = os.path.join(work_dir, self.target, pkg_dir, arch)
+ log_dir = os.path.join(work_dir, self._target_str, pkg_dir, arch)
final_log = None
build_log = "%s/%s" % (log_dir, "build.log")
root_log = "%s/%s" % (log_dir, "root.log")
@@ -694,7 +710,7 @@
name = self.name
if not name:
name = self.package
- subject = 'Build Result: %d - %s on %s' % (self.uid, name, self.target)
+ subject = 'Build Result: %d - %s on %s' % (self.uid, name, self._target_str)
sender = self._server_cfg.get_str("Email", "email_from")
EmailUtils.email_result(sender, to, resultstring, subject)
Index: Repo.py
===================================================================
RCS file: /cvs/fedora/extras-buildsys/server/Repo.py,v
retrieving revision 1.13
retrieving revision 1.14
diff -u -r1.13 -r1.14
--- Repo.py 15 Aug 2005 12:05:02 -0000 1.13
+++ Repo.py 25 Aug 2005 18:15:14 -0000 1.14
@@ -30,14 +30,14 @@
def __init__(self, target_cfg, repodir, builder_manager):
self._builder_manager = builder_manager
self._target_cfg = target_cfg
- self._target = self._target_cfg.get_str("General", "name")
+ self._target_dict = target_cfg.target_dict()
if not os.path.exists(repodir):
print "Error: Repository directory '%s' does not exist." % repodir
os._exit(1)
- self._repodir = os.path.join(repodir, self._target)
+ self._repodir = os.path.join(repodir, self._target_cfg.target_string())
if not os.path.exists(self._repodir):
os.makedirs(self._repodir)
- self._repo_cache_dir = os.path.join(repodir, "cache", self._target)
+ self._repo_cache_dir = os.path.join(repodir, "cache", self._target_cfg.target_string())
if not os.path.exists(self._repo_cache_dir):
os.makedirs(self._repo_cache_dir)
self._lock = threading.Lock()
@@ -54,9 +54,6 @@
threading.Thread.__init__(self)
- def target(self):
- return self._target
-
def target_cfg(self):
return self._target_cfg
Index: UserInterface.py
===================================================================
RCS file: /cvs/fedora/extras-buildsys/server/UserInterface.py,v
retrieving revision 1.48
retrieving revision 1.49
diff -u -r1.48 -r1.49
--- UserInterface.py 15 Aug 2005 14:53:52 -0000 1.48
+++ UserInterface.py 25 Aug 2005 18:15:14 -0000 1.49
@@ -364,7 +364,7 @@
# Get all archjobs for this job
if len(uids) > 0:
sql = "SELECT jobid, parent_uid, starttime, endtime, arch, builder_addr, " \
- "builder_port, status, builder_status FROM archjobs WHERE " + uids
+ "status, builder_status FROM archjobs WHERE " + uids
curs.execute(sql)
data = curs.fetchall()
for row in data:
@@ -375,7 +375,6 @@
ajrec['endtime'] = row['endtime']
ajrec['arch'] = row['arch']
ajrec['builder_addr'] = row['builder_addr']
- ajrec['builder_port'] = row['builder_port']
ajrec['status'] = row['status']
ajrec['builder_status'] = row['builder_status']
for job in jobs:
@@ -431,7 +430,7 @@
jobrec['archjobs'] = []
# Get all archjobs for this job
- sql = "SELECT jobid, parent_uid, starttime, endtime, arch, builder_addr, builder_port, " \
+ sql = "SELECT jobid, parent_uid, starttime, endtime, arch, builder_addr, " \
"status, builder_status FROM archjobs WHERE parent_uid=%d " % uid
curs.execute(sql)
data = curs.fetchall()
@@ -443,7 +442,6 @@
ajrec['endtime'] = row['endtime']
ajrec['arch'] = row['arch']
ajrec['builder_addr'] = row['builder_addr']
- ajrec['builder_port'] = row['builder_port']
ajrec['status'] = row['status']
ajrec['builder_status'] = row['builder_status']
jobrec['archjobs'].append(ajrec)
Index: main.py
===================================================================
RCS file: /cvs/fedora/extras-buildsys/server/main.py,v
retrieving revision 1.12
retrieving revision 1.13
diff -u -r1.12 -r1.13
--- main.py 15 Aug 2005 03:18:20 -0000 1.12
+++ main.py 25 Aug 2005 18:15:14 -0000 1.13
@@ -142,7 +142,6 @@
print "Shutting down..."
bm.stop()
srpm_server.stop()
- del bm
print "Done."
os._exit(0)
More information about the fedora-extras-commits
mailing list