[dak/master] misc fixes
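
Miscellaneous fixes to the process-accepted path and the daklib helpers
it uses (summary of the diff below):

 * dak/process_accepted.py: pass the SQLAlchemy session explicitly into
   install(), stable_install(), process_it() and accepted_checks(), with
   main() opening and closing one session per .changes file; fix several
   NameErrors and typos in add_dsc_to_db() and add_deb_to_db() (iterate
   dsc_files.items(), use source.maintainer_id / source.source_id,
   session.add(ba)); read Changed-By and the signing fingerprint from the
   .changes rather than the .dsc; take the .dsc pool name and sha1/sha256
   sums from the files hash; hand the full pool path and the session to
   copy_temporary_contents().
 * daklib/binary.py: only open (and later close) a private session in
   copy_temporary_contents() when the caller does not supply one.
 * daklib/dbconn.py: use s.suite_id when looking up queue_build entries.
 * daklib/queue.py: require an explicit session in get_type(),
   in_override_p(), the check_*_against_db() helpers and
   accepted_checks(); close locally-opened sessions; fix references to
   self.pkg.changes and use Config() instead of self.Cnf.
 * daklib/utils.py: coerce substitution values to str() in
   TemplateSubst().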



Signed-off-by: Mark Hymers <mhy@debian.org>
---
 dak/process_accepted.py |   65 +++++++++++++++++++++-----------------
 daklib/binary.py        |    9 ++++-
 daklib/dbconn.py        |    2 +-
 daklib/queue.py         |   80 +++++++++++++++++++++++-----------------------
 daklib/utils.py         |    2 +-
 5 files changed, 85 insertions(+), 73 deletions(-)
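
One pattern worth noting before the diff (this note sits above the first
"diff --git" line, so git-am ignores it): copy_temporary_contents() in
daklib/binary.py now opens a database session only when the caller does
not pass one in, and closes only a session it opened itself. A minimal
sketch of that pattern follows; the function name, arguments and the
elided body are placeholders for illustration, not part of the patch.

    from daklib.dbconn import DBConn

    def copy_contents_sketch(package, version, archname, deb, reject, session=None):
        # Open a private session only if the caller did not hand one in,
        # and remember that we did.
        privatetrans = False
        if session is None:
            session = DBConn().session()
            privatetrans = True

        # ... the real queries and inserts run against `session` here ...
        exists = None

        # Close the session only if this function opened it; a
        # caller-supplied session is left for the caller to manage, so
        # the caller keeps control of its own transaction.
        if privatetrans:
            session.close()

        return exists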

diff --git a/dak/process_accepted.py b/dak/process_accepted.py
index 0626e7d..9a56fa2 100755
--- a/dak/process_accepted.py
+++ b/dak/process_accepted.py
@@ -44,7 +44,7 @@ import re
 import apt_pkg, commands
 
 from daklib import daklog
-from daklib import queue
+from daklib.queue import *
 from daklib import utils
 from daklib.dbconn import *
 from daklib.binary import copy_temporary_contents
@@ -145,7 +145,7 @@ def action (u, stable_queue=None, log_urgency=True):
         if stable_queue:
             stable_install(u, summary, short_summary, stable_queue, log_urgency)
         else:
-            install(u, log_urgency)
+            install(u, session, log_urgency)
     elif answer == 'Q':
         sys.exit(0)
 
@@ -173,8 +173,8 @@ def add_dsc_to_db(u, filename, session):
     source.source = u.pkg.dsc["source"]
     source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
     source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
-    source.changedby_id = get_or_set_maintainer(u.pkg.dsc["changed-by"], session).maintainer_id
-    source.fingerprint_id = get_or_set_fingerprint(u.pkg.dsc["fingerprint"], session).fingerprint_id
+    source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
+    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
     source.install_date = datetime.now().date()
 
     dsc_component = entry["component"]
@@ -206,7 +206,7 @@ def add_dsc_to_db(u, filename, session):
     dscfile.poolfile_id = entry["files id"]
     session.add(dscfile)
 
-    for dsc_file, dentry in u.pkg.dsc_files.keys():
+    for dsc_file, dentry in u.pkg.dsc_files.items():
         df = DSCFile()
         df.source_id = source.source_id
 
@@ -214,8 +214,16 @@ def add_dsc_to_db(u, filename, session):
         # files id is stored in dsc_files by check_dsc().
         files_id = dentry.get("files id", None)
 
+        # Find the entry in the files hash
+        # TODO: Bail out here properly
+        dfentry = None
+        for f, e in u.pkg.files.items():
+            if f == dsc_file:
+                dfentry = e
+                break
+
         if files_id is None:
-            filename = dentry["pool name"] + dsc_file
+            filename = dfentry["pool name"] + dsc_file
 
             (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
             # FIXME: needs to check for -1/-2 and or handle exception
@@ -224,6 +232,9 @@ def add_dsc_to_db(u, filename, session):
 
             # If still not found, add it
             if files_id is None:
+                # HACK: Force sha1sum etc into dentry
+                dentry["sha1sum"] = dfentry["sha1sum"]
+                dentry["sha256sum"] = dfentry["sha256sum"]
                 poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
                 files_id = poolfile.file_id
 
@@ -233,7 +244,7 @@ def add_dsc_to_db(u, filename, session):
     session.flush()
 
     # Add the src_uploaders to the DB
-    uploader_ids = [maintainer_id]
+    uploader_ids = [source.maintainer_id]
     if u.pkg.dsc.has_key("uploaders"):
         for up in u.pkg.dsc["uploaders"].split(","):
             up = up.strip()
@@ -249,7 +260,7 @@ def add_dsc_to_db(u, filename, session):
 
         su = SrcUploader()
         su.maintainer_id = up
-        su.source_id = source_id
+        su.source_id = source.source_id
         session.add(su)
 
     session.flush()
@@ -275,6 +286,7 @@ def add_deb_to_db(u, filename, session):
 
     # Find poolfile id
     filename = entry["pool name"] + filename
+    fullpath = os.path.join(cnf["Dir::Pool"], filename)
     if not entry.get("location id", None):
         entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], utils.where_am_i(), session).location_id
 
@@ -285,7 +297,7 @@ def add_deb_to_db(u, filename, session):
     bin.poolfile_id = entry["files id"]
 
     # Find source id
-    bin_sources = get_sources_from_name(entry["source package"], entry["source version"])
+    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
     if len(bin_sources) != 1:
         raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                   (bin.package, bin.version, bin.architecture.arch_string,
@@ -302,28 +314,25 @@ def add_deb_to_db(u, filename, session):
         ba = BinAssociation()
         ba.binary_id = bin.binary_id
         ba.suite_id = get_suite(suite_name).suite_id
-        session.add(sa)
+        session.add(ba)
 
     session.flush()
 
     # Deal with contents
-    contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, filename, reject=None)
+    contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, fullpath, None, session)
     if not contents:
-        print "REJECT\n" + "\n".join(contents.rejects)
+        print "REJECT\nCould not determine contents of package %s" % bin.package
         session.rollback()
         raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
 
 
-def install(u, log_urgency=True):
+def install(u, session, log_urgency=True):
     cnf = Config()
     summarystats = SummaryStats()
 
     print "Installing."
 
-    Logger.log(["installing changes",pkg.changes_file])
-
-    # Begin a transaction; if we bomb out anywhere between here and the COMMIT WORK below, the DB will not be changed.
-    session = DBConn().session()
+    Logger.log(["installing changes", u.pkg.changes_file])
 
     # Ensure that we have all the hashes we need below.
     u.ensure_hashes()
@@ -334,12 +343,12 @@ def install(u, log_urgency=True):
         return
 
     # Add the .dsc file to the DB first
-    for newfile in u.pkg.files.keys():
+    for newfile, entry in u.pkg.files.items():
         if entry["type"] == "dsc":
             dsc_component, dsc_location_id = add_dsc_to_db(u, newfile, session)
 
     # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
-    for newfile in u.pkg.files.keys():
+    for newfile, entry in u.pkg.files.items():
         if entry["type"] == "deb":
             add_deb_to_db(u, newfile, session)
 
@@ -463,9 +472,8 @@ def install(u, log_urgency=True):
     summarystats.accept_count += 1
 
 ################################################################################
-### XXX: UP TO HERE
 
-def stable_install(u, summary, short_summary, fromsuite_name="proposed-updates"):
+def stable_install(u, session, summary, short_summary, fromsuite_name="proposed-updates"):
     summarystats = SummaryStats()
 
     fromsuite_name = fromsuite_name.lower()
@@ -478,10 +486,6 @@ def stable_install(u, summary, short_summary, fromsuite_name="proposed-updates")
     fromsuite = get_suite(fromsuite_name)
     tosuite = get_suite(tosuite_name)
 
-    # Begin a transaction; if we bomb out anywhere between here and
-    # the COMMIT WORK below, the DB won't be changed.
-    session = DBConn().session()
-
     # Add the source to stable (and remove it from proposed-updates)
     for newfile, entry in u.pkg.files.items():
         if entry["type"] == "dsc":
@@ -591,7 +595,7 @@ def stable_install(u, summary, short_summary, fromsuite_name="proposed-updates")
 
 ################################################################################
 
-def process_it(changes_file, stable_queue=None, log_urgency=True):
+def process_it(changes_file, stable_queue, log_urgency, session):
     cnf = Config()
     u = Upload()
 
@@ -612,13 +616,13 @@ def process_it(changes_file, stable_queue=None, log_urgency=True):
         # overwrite_checks should not be performed if installing to stable
         overwrite_checks = False
 
-    u.load_dot_dak(cfile)
+    u.pkg.load_dot_dak(cfile)
     u.update_subst()
 
     if stable_queue:
         u.pkg.changes_file = old
 
-    u.accepted_checks(overwrite_checks)
+    u.accepted_checks(overwrite_checks, True, session)
     action(u, stable_queue, log_urgency)
 
     # Restore CWD
@@ -670,10 +674,13 @@ def main():
     # Sort the .changes files so that we process sourceful ones first
     changes_files.sort(utils.changes_compare)
 
+
     # Process the changes files
     for changes_file in changes_files:
         print "\n" + changes_file
-        process_it(changes_file, stable_queue, log_urgency)
+        session = DBConn().session()
+        process_it(changes_file, stable_queue, log_urgency, session)
+        session.close()
 
     if summarystats.accept_count:
         sets = "set"
diff --git a/daklib/binary.py b/daklib/binary.py
index 9272795..0a27f95 100755
--- a/daklib/binary.py
+++ b/daklib/binary.py
@@ -264,14 +264,16 @@ def copy_temporary_contents(package, version, archname, deb, reject, session=Non
     contents stored in pending_content_associations
     """
 
-    # first see if contents exist:
     cnf = Config()
 
+    privatetrans = False
     if session is None:
         session = DBConn().session()
+        privatetrans = True
 
     arch = get_architecture(archname, session=session)
 
+    # first see if contents exist:
     in_pcaq = """SELECT 1 FROM pending_content_associations
                                WHERE package=:package
                                AND version=:version
@@ -281,7 +283,7 @@ def copy_temporary_contents(package, version, archname, deb, reject, session=Non
             'version': version,
             'archid': arch.arch_id}
 
-    exists = True
+    exists = None
     check = session.execute(in_pcaq, vals)
 
     if check.rowcount > 0:
@@ -311,6 +313,9 @@ def copy_temporary_contents(package, version, archname, deb, reject, session=Non
         session.execute(sql, vals)
         session.commit()
 
+    if privatetrans:
+        session.close()
+
     return exists
 
 __all__.append('copy_temporary_contents')
diff --git a/daklib/dbconn.py b/daklib/dbconn.py
index 4669cfc..556921e 100755
--- a/daklib/dbconn.py
+++ b/daklib/dbconn.py
@@ -1579,7 +1579,7 @@ class Queue(object):
 
                 # If it does, update things to ensure it's not removed prematurely
                 else:
-                    qb = get_queue_build(dest, suite_id, session)
+                    qb = get_queue_build(dest, s.suite_id, session)
                     if qb is None:
                         qb.in_queue = True
                         qb.last_used = None
diff --git a/daklib/queue.py b/daklib/queue.py
index fe23180..ae4cf0b 100755
--- a/daklib/queue.py
+++ b/daklib/queue.py
@@ -53,20 +53,20 @@ from binary import Binary
 
 ###############################################################################
 
-def get_type(f, session=None):
+def get_type(f, session):
     """
     Get the file type of C{f}
 
     @type f: dict
     @param f: file entry from Changes object
 
+    @type session: SQLA Session
+    @param session: SQL Alchemy session object
+
     @rtype: string
     @return: filetype
 
     """
-    if session is None:
-        session = DBConn().session()
-
     # Determine the type
     if f.has_key("dbtype"):
         file_type = file["dbtype"]
@@ -115,7 +115,7 @@ def determine_new(changes, files, warn=1):
         pkg = f["package"]
         priority = f["priority"]
         section = f["section"]
-        file_type = get_type(f)
+        file_type = get_type(f, session)
         component = f["component"]
 
         if file_type == "dsc":
@@ -160,6 +160,8 @@ def determine_new(changes, files, warn=1):
             if new[pkg].has_key("othercomponents"):
                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
 
+    session.close()
+
     return new
 
 ################################################################################
@@ -306,7 +308,7 @@ class Upload(object):
 
         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
         if not self.pkg.changes.has_key("architecture") or not \
-           isinstance(changes["architecture"], DictType):
+           isinstance(self.pkg.changes["architecture"], DictType):
             self.pkg.changes["architecture"] = { "Unknown" : "" }
 
         # and maintainer2047 may not exist.
@@ -323,7 +325,7 @@ class Upload(object):
            (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
 
             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
-            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], changes["maintainer2047"])
+            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
         else:
             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
@@ -895,6 +897,8 @@ class Upload(object):
             for suite in self.pkg.changes["distribution"].keys():
                 self.per_suite_file_checks(f, suite, session)
 
+        session.close()
+
         # If the .changes file says it has source, it must have source.
         if self.pkg.changes["architecture"].has_key("source"):
             if not has_source:
@@ -1014,9 +1018,10 @@ class Upload(object):
             self.rejects.append("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))
 
         # Ensure source is newer than existing source in target suites
-        self.check_source_against_db(dsc_filename)
-
-        self.check_dsc_against_db(dsc_filename)
+        session = DBConn().session()
+        self.check_source_against_db(dsc_filename, session)
+        self.check_dsc_against_db(dsc_filename, session)
+        session.close()
 
         return True
 
@@ -1331,6 +1336,8 @@ class Upload(object):
                 if self.pkg.files[f].has_key("new"):
                     self.rejects.append("%s may not upload NEW file %s" % (uid, f))
 
+        session.close()
+
     ###########################################################################
     def build_summaries(self):
         """ Build a summary of changes the upload introduces. """
@@ -1491,7 +1498,7 @@ distribution."""
             targetdir = cnf["Dir::Queue::Accepted"]
 
         print "Accepting."
-	if self.logger:
+        if self.logger:
             self.logger.log(["Accepting changes", self.pkg.changes_file])
 
         self.pkg.write_dot_dak(targetdir)
@@ -1771,7 +1778,7 @@ distribution."""
         return 0
 
     ################################################################################
-    def in_override_p(self, package, component, suite, binary_type, file, session=None):
+    def in_override_p(self, package, component, suite, binary_type, file, session):
         """
         Check if a package already has override entries in the DB
 
@@ -1796,9 +1803,6 @@ distribution."""
 
         cnf = Config()
 
-        if session is None:
-            session = DBConn().session()
-
         if binary_type == "": # must be source
             file_type = "dsc"
         else:
@@ -1834,8 +1838,9 @@ distribution."""
 
         Description: TODO
         """
+        Cnf = Config()
         anyversion = None
-        anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
+        anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
         for (s, v) in sv_list:
             if s in [ x.lower() for x in anysuite ]:
                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
@@ -1923,10 +1928,7 @@ distribution."""
                         self.reject.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
 
     ################################################################################
-    def check_binary_against_db(self, file, session=None):
-        if session is None:
-            session = DBConn().session()
-
+    def check_binary_against_db(self, file, session):
         # Ensure version is sane
         q = session.query(BinAssociation)
         q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[file]["package"])
@@ -1945,12 +1947,9 @@ distribution."""
 
     ################################################################################
 
-    def check_source_against_db(self, file, session=None):
+    def check_source_against_db(self, file, session):
         """
         """
-        if session is None:
-            session = DBConn().session()
-
         source = self.pkg.dsc.get("source")
         version = self.pkg.dsc.get("version")
 
@@ -1962,7 +1961,7 @@ distribution."""
                                        file, version, sourceful=True)
 
     ################################################################################
-    def check_dsc_against_db(self, file, session=None):
+    def check_dsc_against_db(self, file, session):
         """
 
         @warning: NB: this function can remove entries from the 'files' index [if
@@ -1973,9 +1972,7 @@ distribution."""
 
         """
 
-        if session is None:
-            session = DBConn().session()
-
+        Cnf = Config()
         self.pkg.orig_tar_gz = None
 
         # Try and find all files mentioned in the .dsc.  This has
@@ -1991,7 +1988,7 @@ distribution."""
                 found = "%s in incoming" % (dsc_name)
 
                 # Check the file does not already exist in the archive
-                ql = get_poolfile_like_name(dsc_name)
+                ql = get_poolfile_like_name(dsc_name, session)
 
                 # Strip out anything that isn't '%s' or '/%s$'
                 for i in ql:
@@ -2060,18 +2057,18 @@ distribution."""
                     old_file_fh.close()
                     actual_size = os.stat(old_file)[stat.ST_SIZE]
                     found = old_file
-                    suite_type = f.location.archive_type
+                    suite_type = x.location.archive_type
                     # need this for updating dsc_files in install()
-                    dsc_entry["files id"] = f.file_id
+                    dsc_entry["files id"] = x.file_id
                     # See install() in process-accepted...
-                    self.pkg.orig_tar_id = f.file_id
+                    self.pkg.orig_tar_id = x.file_id
                     self.pkg.orig_tar_gz = old_file
-                    self.pkg.orig_tar_location = f.location.location_id
+                    self.pkg.orig_tar_location = x.location.location_id
                 else:
                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
                     # Not there? Check the queue directories...
                     for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
-                        in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)], dsc_name)
+                        in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
                         if os.path.exists(in_otherdir):
                             in_otherdir_fh = utils.open_file(in_otherdir)
                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
@@ -2093,18 +2090,21 @@ distribution."""
                 self.rejects.append("size for %s doesn't match %s." % (found, file))
 
     ################################################################################
-    def accepted_checks(self, overwrite_checks=True, session=None):
+    def accepted_checks(self, overwrite_checks, session):
         # Recheck anything that relies on the database; since that's not
         # frozen between accept and our run time when called from p-a.
 
         # overwrite_checks is set to False when installing to stable/oldstable
 
-        if session is None:
-            session = DBConn().session()
-
         propogate={}
         nopropogate={}
 
+        # Find the .dsc (again)
+        dsc_filename = None
+        for f in self.pkg.files.keys():
+            if self.pkg.files[f]["type"] == "dsc":
+                dsc_filename = f
+
         for checkfile in self.pkg.files.keys():
             # The .orig.tar.gz can disappear out from under us is it's a
             # duplicate of one in the archive.
@@ -2131,7 +2131,7 @@ distribution."""
 
             # propogate in the case it is in the override tables:
             for suite in self.pkg.changes.get("propdistribution", {}).keys():
-                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile):
+                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                     propogate[suite] = 1
                 else:
                     nopropogate[suite] = 1
@@ -2144,7 +2144,7 @@ distribution."""
         for checkfile in self.pkg.files.keys():
             # Check the package is still in the override tables
             for suite in self.pkg.changes["distribution"].keys():
-                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile):
+                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
 
     ################################################################################
diff --git a/daklib/utils.py b/daklib/utils.py
index 7b13976..32cba98 100755
--- a/daklib/utils.py
+++ b/daklib/utils.py
@@ -671,7 +671,7 @@ def TemplateSubst(map, filename):
     templatefile = open_file(filename)
     template = templatefile.read()
     for x in map.keys():
-        template = template.replace(x,map[x])
+        template = template.replace(x, str(map[x]))
     templatefile.close()
     return template
 
-- 
1.5.6.5


