
[dak/master 07/16] before I rip out pending_*



---
 dak/contents.py         |  518 ++++++++++++++++++++++++++++++++++++++---------
 dak/dakdb/update19.py   |  239 ++++++++++++++++++++++
 dak/process_accepted.py |   21 ++-
 daklib/binary.py        |   92 ++++++---
 daklib/dbconn.py        |   87 ++++++--
 5 files changed, 809 insertions(+), 148 deletions(-)
 create mode 100644 dak/dakdb/update19.py
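
A note for reviewers on the new design: contents generation becomes a
five-stage pipeline (query -> ingest -> sort -> output -> gzip), one
thread per stage, chained by OneAtATime handoff objects. Below is a
minimal standalone sketch of the handoff semantics (illustration only,
not part of the patch; OneAtATime and EndOfContents are the classes
added to dak/contents.py in this commit):

    import threading

    channel = OneAtATime()

    def consumer():
        while True:
            item = channel.dequeue()        # blocks until an item arrives
            if isinstance(item, EndOfContents):
                break
            print("working on %s" % item)

    t = threading.Thread(target=consumer)
    t.start()
    channel.enqueue("Contents-i386")        # blocks until the slot is free
    channel.enqueue(EndOfContents())        # sentinel: tells the consumer to stop
    t.join()

Each enqueue() hands exactly one object to the next stage, so a slow
stage back-pressures the stage before it instead of queueing unbounded
work.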

diff --git a/dak/contents.py b/dak/contents.py
index 834cbcc..c0d00c8 100755
--- a/dak/contents.py
+++ b/dak/contents.py
@@ -93,101 +93,240 @@ class EndOfContents(object):
     """
     pass
 
-class GzippedContentWriter(object):
+class OneAtATime(object):
     """
-    An object which will write contents out to a Contents-$arch.gz
-    file on a separate thread
     """
+    def __init__(self):
+        self.next_in_line = None
+        self.next_lock = threading.Condition()
+
+    def enqueue(self, next):
+        self.next_lock.acquire()
+        while self.next_in_line:
+            self.next_lock.wait()
+        assert not self.next_in_line
+        self.next_in_line = next
+        self.next_lock.notify()
+        self.next_lock.release()
+
+    def dequeue(self):
+        self.next_lock.acquire()
+        while not self.next_in_line:
+            self.next_lock.wait()
+        result = self.next_in_line
+        self.next_in_line = None
+        self.next_lock.notify()
+        self.next_lock.release()
+        return result
+
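+# The worker classes below form a pipeline: QueryThread -> IngestThread
+# -> SortThread -> OutputThread -> GzipThread, chained by OneAtATime
+# instances; each stage passes ContentFile objects downstream and shuts
+# down when the EndOfContents sentinel arrives.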
+class ContentsWorkThread(threading.Thread):
+    """
+    """
+    def __init__(self, upstream, downstream):
+        threading.Thread.__init__(self)
+        self.upstream = upstream
+        self.downstream = downstream
 
-    header = None # a class object holding the header section of contents file
+    def run(self):
+        while True:
+            try:
+                contents_file = self.upstream.dequeue()
+                if isinstance(contents_file,EndOfContents):
+                    if self.downstream:
+                        self.downstream.enqueue(contents_file)
+                    break
+
+                s = datetime.datetime.now()
+                print("%s start: %s" % (self,contents_file) )
+                self._run(contents_file)
+                print("%s finished: %s in %d seconds" % (self, contents_file, (datetime.datetime.now()-s).seconds ))
+                if self.downstream:
+                    self.downstream.enqueue(contents_file)
+            except:
+                traceback.print_exc()
+
+class QueryThread(ContentsWorkThread):
+    def __init__(self, upstream, downstream):
+        ContentsWorkThread.__init__(self, upstream, downstream)
+
+    def __str__(self):
+        return "QueryThread"
+    __repr__ = __str__
+
+    def _run(self, contents_file):
+        contents_file.query()
+
+class IngestThread(ContentsWorkThread):
+    def __init__(self, upstream, downstream):
+        ContentsWorkThread.__init__(self, upstream, downstream)
+
+    def __str__(self):
+        return "IngestThread"
+    __repr__ = __str__
+
+    def _run(self, contents_file):
+        contents_file.ingest()
+
+class SortThread(ContentsWorkThread):
+    def __init__(self, upstream, downstream):
+        ContentsWorkThread.__init__(self, upstream, downstream)
+
+    def __str__(self):
+        return "SortThread"
+    __repr__ = __str__
+
+    def _run(self, contents_file):
+        contents_file.sorted_keys = sorted(contents_file.filenames.keys())
+
+class OutputThread(ContentsWorkThread):
+    def __init__(self, upstream, downstream):
+        ContentsWorkThread.__init__(self, upstream, downstream)
+
+    def __str__(self):
+        return "OutputThread"
+    __repr__ = __str__
+
+    def _run(self, contents_file):
+        contents_file.open_file()
+        for fname in contents_file.sorted_keys:
+            contents_file.filehandle.write("%s\t%s\n" % (fname,contents_file.filenames[fname]))
+        contents_file.sorted_keys = None
+        contents_file.filenames.clear()
+
+class GzipThread(ContentsWorkThread):
+    def __init__(self, upstream, downstream):
+        ContentsWorkThread.__init__(self, upstream, downstream)
+
+    def __str__(self):
+        return "GzipThread"
+    __repr__ = __str__
+
+    def _run(self, contents_file):
+        os.system("gzip -f %s" % contents_file.filename)
+
+class ContentFile(object):
+    def __init__(self,
+                 filename,
+                 suite_str,
+                 suite_id):
+        self.filename = filename
+        self.filenames = {}
+        self.sorted_keys = None
+        self.suite_str = suite_str
+        self.suite_id = suite_id
+        self.cursor = None
+        self.filehandle = None
+
+    def __str__(self):
+        return self.filename
+    __repr__ = __str__
+
+
+    def cleanup(self):
+        self.filenames = None
+        self.sorted_keys = None
+        self.filehandle.close()
+        self.cursor.close()
+
+    def ingest(self):
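+        # Aggregate cursor rows into self.filenames, mapping each path
+        # to a comma-separated list of (component/)section/package
+        # entries, which is the Contents file format.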
+        while True:
+            r = self.cursor.fetchone()
+            if not r:
+                break
+            filename, package = r
+            if self.filenames.has_key(filename):
+                self.filenames[filename] += ",%s" % (package)
+            else:
+                self.filenames[filename] = "%s" % (package)
+        self.cursor.close()
 
-    def __init__(self, filename):
-        """
-        @type filename: string
-        @param filename: the name of the file to write to
-        """
-        self.queue = Queue.Queue()
-        self.current_file = None
-        self.first_package = True
-        self.output = self.open_file(filename)
-        self.thread = threading.Thread(target=self.write_thread,
-                                       name='Contents writer')
-        self.thread.start()
-
-    def open_file(self, filename):
+    def open_file(self):
         """
         opens a gzip stream to the contents file
         """
-        filepath = Config()["Contents::Root"] + filename
-        filedir = os.path.dirname(filepath)
+        self.filename = Config()["Contents::Root"] + self.filename
+        filedir = os.path.dirname(self.filename)
         if not os.path.isdir(filedir):
             os.makedirs(filedir)
-        return gzip.open(filepath, "w")
-
-    def write(self, filename, section, package):
-        """
-        enqueue content to be written to the file on a separate thread
-        """
-        self.queue.put((filename,section,package))
-
-    def write_thread(self):
-        """
-        the target of a Thread which will do the actual writing
-        """
-        while True:
-            next = self.queue.get()
-            if isinstance(next, EndOfContents):
-                self.output.write('\n')
-                self.output.close()
-                break
+        # plain file; GzipThread compresses it afterwards
+        self.filehandle = open(self.filename, "w")
+        self._write_header()
 
-            (filename,section,package)=next
-            if next != self.current_file:
-                # this is the first file, so write the header first
-                if not self.current_file:
-                    self.output.write(self._getHeader())
+    def _write_header(self):
+        self._get_header()
+        self.filehandle.write(ContentFile.header)
 
-                self.output.write('\n%s\t' % filename)
-                self.first_package = True
-
-            self.current_file=filename
-
-            if not self.first_package:
-                self.output.write(',')
-            else:
-                self.first_package=False
-            self.output.write('%s/%s' % (section,package))
-
-    def finish(self):
-        """
-        enqueue the sentry object so that writers will know to terminate
-        """
-        self.queue.put(EndOfContents())
+    header = None
 
     @classmethod
-    def _getHeader(self):
+    def _get_header(self):
         """
         Internal method to return the header for Contents.gz files
 
         This is boilerplate which explains the contents of the file and how
         it can be used.
         """
-        if not GzippedContentWriter.header:
+        if not ContentFile.header:
             if Config().has_key("Contents::Header"):
                 try:
                     h = open(os.path.join( Config()["Dir::Templates"],
                                            Config()["Contents::Header"] ), "r")
-                    GzippedContentWriter.header = h.read()
+                    ContentFile.header = h.read()
                     h.close()
                 except:
                     log.error( "error opening header file: %d\n%s" % (Config()["Contents::Header"],
                                                                       traceback.format_exc() ))
-                    GzippedContentWriter.header = None
+                    ContentFile.header = None
             else:
-                GzippedContentWriter.header = None
-
-        return GzippedContentWriter.header
-
+                ContentFile.header = None
+
+        return ContentFile.header
+
+
+class DebContentFile(ContentFile):
+    def __init__(self,
+                 filename,
+                 suite_str,
+                 suite_id,
+                 arch_str,
+                 arch_id):
+        ContentFile.__init__(self,
+                             filename,
+                             suite_str,
+                             suite_id )
+        self.arch_str = arch_str
+        self.arch_id = arch_id
+
+    def query(self):
+        session = DBConn().session()
+
+        # arch=2 is assumed to be the id of arch:all (hardcoded, as in
+        # update19's partial indexes)
+        self.cursor = session.execute("""SELECT filename, component || section || '/' || package
+        FROM deb_contents
+        WHERE ( arch=2 OR arch = :arch ) AND suite = :suite""",
+                                      {'arch': self.arch_id, 'suite': self.suite_id})
+
+class UdebContentFile(ContentFile):
+    def __init__(self,
+                 filename,
+                 suite_str,
+                 suite_id,
+                 section_name,
+                 section_id):
+        ContentFile.__init__(self,
+                             filename,
+                             suite_str,
+                             suite_id )
+        self.section_name = section_name
+        self.section_id = section_id
+
+    def query(self):
+        session = DBConn().session()
+
+        self.cursor = session.execute("""SELECT filename, component || section || '/' || package
+        FROM udeb_contents
+        WHERE suite = :suite""",
+                                      {'suite': self.suite_id})
 
 class Contents(object):
     """
@@ -238,9 +377,9 @@ class Contents(object):
 
         s = DBConn().session()
 
-        #        for binary in s.query(DBBinary).all() ):
-        binary = s.query(DBBinary).first()
-        if binary:
+        print( "bootstrap_bin" )
+        for binary in s.query(DBBinary).yield_per(1000):
+            print( "binary: %s" % binary.package )
             filename = binary.poolfile.filename
              # Check for existing contents
             existingq = s.execute( "select 1 from bin_contents where binary_id=:id", {'id':binary.binary_id} );
@@ -262,29 +401,198 @@ class Contents(object):
         """
         scan the existing debs in the pool to populate the contents database tables
         """
-        pooldir = Config()[ 'Dir::Pool' ]
-
         s = DBConn().session()
 
-        for suite in s.query(Suite).all():
-            for arch in get_suite_architectures(suite.suite_name, skipsrc=True, skipall=True, session=s):
-                q = s.query(BinAssociation).join(Suite)
-                q = q.join(Suite).filter_by(suite_name=suite.suite_name)
-                q = q.join(DBBinary).join(Architecture).filter_by(arch.arch_string)
-                for ba in q:
-                    filename = ba.binary.poolfile.filename
-                    # Check for existing contents
-                    existingq = s.query(ContentAssociations).filter_by(binary_pkg=ba.binary_id).limit(1)
-                    if existingq.count() > 0:
-                        log.debug( "already imported: %s" % (filename))
-                    else:
-                        # We don't have existing contents so import them
-                        log.debug( "scanning: %s" % (filename) )
-                        debfile = os.path.join(pooldir, filename)
-                        if os.path.exists(debfile):
-                            Binary(debfile, self.reject).scan_package(ba.binary_id, True)
-                        else:
-                            log.error("missing .deb: %s" % filename)
+        for override in s.query(Override).all():
+            binaries = s.execute("""SELECT b.id AS binary_id, b.architecture AS arch
+                                    FROM binaries b
+                                    JOIN bin_associations ba ON ba.bin=b.id
+                                    WHERE ba.suite=:suite
+                                    AND b.package=:package""",
+                                 {'suite': override.suite, 'package': override.package})
+            while True:
+                binary = binaries.fetchone()
+                if not binary:
+                    break
+
+                filenames = s.execute( """SELECT file from bin_contents where binary_id=:id""", { 'id': binary.binary_id } )
+                while True:
+                    row = filenames.fetchone()
+                    if not row:
+                        break
+                    filename = row.file
+
+                    if override.type == 7:
+                        s.execute( """INSERT INTO deb_contents (filename,section,package,binary_id,arch,suite,component)
+                                      VALUES (:filename, :section, :package, :binary_id, :arch, :suite, :component);""",
+                                   { 'filename' : filename,
+                                     'section' : override.section,
+                                     'package' : override.package,
+                                     'binary_id' : binary.binary_id,
+                                     'arch' : binary.arch,
+                                     'suite' : override.suite,
+                                     'component' : override.component } )
+
+
+                    elif override.type == 9:
+                        s.execute( """INSERT INTO udeb_contents (filename,section,package,binary_id,arch,suite,component)
+                                      VALUES (:filename, :section, :package, :binary_id, :arch, :suite, :component);""",
+                                   { 'filename' : filename,
+                                     'section' : override.section,
+                                     'package' : override.package,
+                                     'binary_id' : binary.binary_id,
+                                     'arch' : binary.arch,
+                                     'suite' : override.suite,
+                                     'component' : override.component } )
+
+
+    def generate(self):
+        """
+        Generate contents files for both deb and udeb
+        """
+        DBConn().prepare("arches_q", arches_q)
+        self.deb_generate()
+#        self.udeb_generate()
+
+    def deb_generate(self):
+        """
+        Generate Contents-$arch.gz files for every available arch in each given suite.
+        """
+        cursor = DBConn().session()
+        debtype_id = DBConn().get_override_type_id("deb")
+        suites = self._suites()
+
+        inputtoquery = OneAtATime()
+        querytoingest = OneAtATime()
+        ingesttosort = OneAtATime()
+        sorttooutput = OneAtATime()
+        outputtogzip = OneAtATime()
+
+        qt = QueryThread(inputtoquery,querytoingest)
+        it = IngestThread(querytoingest,ingesttosort)
+# these actually make things worse
+#        it2 = IngestThread(querytoingest,ingesttosort)
+#        it3 = IngestThread(querytoingest,ingesttosort)
+#        it4 = IngestThread(querytoingest,ingesttosort)
+        st = SortThread(ingesttosort,sorttooutput)
+        ot = OutputThread(sorttooutput,outputtogzip)
+        gt = GzipThread(outputtogzip, None)
+
+        qt.start()
+        it.start()
+#        it2.start()
+#        it3.start()
+#        it4.start()
+        st.start()
+        ot.start()
+        gt.start()
+
+        # Get our suites, and the architectures
+        for suite in [i.lower() for i in suites]:
+            suite_id = DBConn().get_suite_id(suite)
+            arch_list = self._arches(cursor, suite_id)
+
+            for (arch_id,arch_str) in arch_list:
+                print( "suite: %s, arch: %s time: %s" % (suite, arch_str, datetime.datetime.now().isoformat()) )
+
+                # uncompressed; GzipThread appends the .gz
+                filename = "dists/%s/Contents-%s" % (suite, arch_str)
+                cf = DebContentFile(filename, suite, suite_id, arch_str, arch_id)
+                inputtoquery.enqueue( cf )
+
+        inputtoquery.enqueue( EndOfContents() )
+        gt.join()
+
+    def udeb_generate(self):
+        """
+        Generate Contents-$arch.gz files for every available arch in each given suite.
+        """
+        cursor = DBConn().session()
+        udebtype_id=DBConn().get_override_type_id("udeb")
+        suites = self._suites()
+
+        inputtoquery = OneAtATime()
+        querytoingest = OneAtATime()
+        ingesttosort = OneAtATime()
+        sorttooutput = OneAtATime()
+        outputtogzip = OneAtATime()
+
+        qt = QueryThread(inputtoquery,querytoingest)
+        it = IngestThread(querytoingest,ingesttosort)
+# these actually make things worse
+#        it2 = IngestThread(querytoingest,ingesttosort)
+#        it3 = IngestThread(querytoingest,ingesttosort)
+#        it4 = IngestThread(querytoingest,ingesttosort)
+        st = SortThread(ingesttosort,sorttooutput)
+        ot = OutputThread(sorttooutput,outputtogzip)
+        gt = GzipThread(outputtogzip, None)
+
+        qt.start()
+        it.start()
+#        it2.start()
+#        it3.start()
+#        it4.start()
+        st.start()
+        ot.start()
+        gt.start()
+
+        for section, fn_pattern in [("debian-installer","dists/%s/Contents-udeb-%s"),
+                                    ("non-free/debian-installer", "dists/%s/Contents-udeb-nf-%s")]:
+
+            section_id = DBConn().get_section_id(section) # all udebs should be here
+            if section_id != -1:
+
+                # Get our suites, and the architectures
+                for suite in [i.lower() for i in suites]:
+                    suite_id = DBConn().get_suite_id(suite)
+                    arch_list = self._arches(cursor, suite_id)
+
+                    for (arch_id, arch_str) in arch_list:
+                        filename = fn_pattern % (suite, arch_str)
+                        cf = UdebContentFile(filename, suite, suite_id, section, section_id)
+                        inputtoquery.enqueue( cf )
+
+        inputtoquery.enqueue( EndOfContents() )
+        gt.join()
+
 
 
     def generate(self):
@@ -335,6 +643,34 @@ class Contents(object):
                     # close all the files
                     for writer in file_writers.values():
                         writer.finish()
+    def _suites(self):
+        """
+        return a list of suites to operate on
+        """
+        if Config().has_key( "%s::%s" %(options_prefix,"Suite")):
+            suites = utils.split_args(Config()[ "%s::%s" %(options_prefix,"Suite")])
+        else:
+            suites = [ 'unstable', 'testing' ]
+#            suites = Config().SubTree("Suite").List()
+
+        return suites
+
+    def _arches(self, cursor, suite):
+        """
+        return a list of archs to operate on
+        """
+        arch_list = []
+        cursor.execute("EXECUTE arches_q(%d)" % (suite))
+        while True:
+            r = cursor.fetchone()
+            if not r:
+                break
+
+            if r[1] != "source" and r[1] != "all":
+                arch_list.append((r[0], r[1]))
+
+        return arch_list
+
 
 ################################################################################
 
diff --git a/dak/dakdb/update19.py b/dak/dakdb/update19.py
new file mode 100644
index 0000000..f530375
--- /dev/null
+++ b/dak/dakdb/update19.py
@@ -0,0 +1,239 @@
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Add tables for deb and udeb contents, plus triggers to keep them updated
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2009  Mike O'Connor <stew@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+################################################################################
+
+
+################################################################################
+
+import psycopg2
+import time
+from daklib.config import Config
+from daklib.dbconn import DBConn
+from daklib import utils
+from daklib.dak_exceptions import DBUpdateError
+
+################################################################################
+
+# config prefix used by dak/contents.py for its Suite option
+options_prefix = "Contents"
+
+def suites():
+    """
+    return a list of suites to operate on
+    """
+    if Config().has_key( "%s::%s" % (options_prefix, "Suite")):
+        suites = utils.split_args(Config()[ "%s::%s" % (options_prefix, "Suite")])
+    else:
+        suites = [ 'unstable', 'testing' ]
+#        suites = Config().SubTree("Suite").List()
+
+    return suites
+
+def arches(cursor, suite):
+    """
+    return a list of archs to operate on
+    """
+    arch_list = []
+    cursor.execute("""SELECT s.architecture, a.arch_string
+    FROM suite_architectures s
+    JOIN architecture a ON (s.architecture=a.id)
+    WHERE suite = %(suite)s""", {'suite' : suite })
+
+    while True:
+        r = cursor.fetchone()
+        if not r:
+            break
+
+        if r[1] != "source" and r[1] != "all":
+            arch_list.append((r[0], r[1]))
+
+    return arch_list
+
+def do_update(self):
+    """
+    Adding contents table as first step to maybe, finally getting rid
+    of apt-ftparchive
+    """
+
+    print __doc__
+
+    try:
+        c = self.db.cursor()
+
+        c.execute("""CREATE TABLE pending_bin_contents (
+        id serial NOT NULL,
+        package text NOT NULL,
+        version debversion NOT NULL,
+        arch int NOT NULL,
+        filename text NOT NULL,
+        type int NOT NULL,
+        PRIMARY KEY(id))""" );
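+        # NB: the "type" column stores an override type id;
+        # insert_pending_content_paths hardcodes 7 for deb and 8 for udeb.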
+
+        c.execute("""CREATE TABLE deb_contents (
+        filename text,
+        section text,
+        package text,
+        binary_id integer,
+        arch integer,
+        suite integer,
+        component text)""" )
+
+        c.execute("""CREATE TABLE udeb_contents (
+        filename text,
+        section text,
+        package text,
+        binary_id integer,
+        suite integer,
+        arch integer,
+        component text )""" )
+
+        c.execute("""ALTER TABLE ONLY deb_contents
+        ADD CONSTRAINT deb_contents_arch_fkey
+        FOREIGN KEY (arch) REFERENCES architecture(id)
+        ON DELETE CASCADE;""")
+
+        c.execute("""ALTER TABLE ONLY udeb_contents
+        ADD CONSTRAINT udeb_contents_arch_fkey
+        FOREIGN KEY (arch) REFERENCES architecture(id)
+        ON DELETE CASCADE;""")
+
+        c.execute("""ALTER TABLE ONLY deb_contents
+        ADD CONSTRAINT deb_contents_pkey
+        PRIMARY KEY (filename,package,arch,suite);""")
+
+        c.execute("""ALTER TABLE ONLY udeb_contents
+        ADD CONSTRAINT udeb_contents_pkey
+        PRIMARY KEY (filename,package,arch,suite);""")
+
+        c.execute("""ALTER TABLE ONLY deb_contents
+        ADD CONSTRAINT deb_contents_suite_fkey
+        FOREIGN KEY (suite) REFERENCES suite(id)
+        ON DELETE CASCADE;""")
+
+        c.execute("""ALTER TABLE ONLY udeb_contents
+        ADD CONSTRAINT udeb_contents_suite_fkey
+        FOREIGN KEY (suite) REFERENCES suite(id)
+        ON DELETE CASCADE;""")
+
+        c.execute("""ALTER TABLE ONLY deb_contents
+        ADD CONSTRAINT deb_contents_binary_fkey
+        FOREIGN KEY (binary_id) REFERENCES binaries(id)
+        ON DELETE CASCADE;""")
+
+        c.execute("""ALTER TABLE ONLY udeb_contents
+        ADD CONSTRAINT udeb_contents_binary_fkey
+        FOREIGN KEY (binary_id) REFERENCES binaries(id)
+        ON DELETE CASCADE;""")
+
+        c.execute("""CREATE INDEX ind_deb_contents_binary ON deb_contents(binary_id);""" )
+
+
+        suite_list = suites()
+
+        for suite in [i.lower() for i in suite_list]:
+            suite_id = DBConn().get_suite_id(suite)
+            arch_list = arches(c, suite_id)
+
+            for (arch_id,arch_str) in arch_list:
+                c.execute( "CREATE INDEX ind_deb_contents_%s_%s ON deb_contents (arch,suite) WHERE (arch=2 OR arch=%d) AND suite=%d" % (arch_str, suite, arch_id, suite_id) )
+
+            for section, sname in [("debian-installer","main"),
+                                   ("non-free/debian-installer", "nonfree")]:
+                c.execute( "CREATE INDEX ind_udeb_contents_%s_%s ON udeb_contents (section,suite) WHERE section='%s' AND suite=%d" % (sname, suite, section, suite_id) )
+
+
+        c.execute( """CREATE OR REPLACE FUNCTION update_contents_for_bin_a() RETURNS trigger AS  $$
+    event = TD["event"]
+    if event == "DELETE" or event == "UPDATE":
+
+        plpy.execute(plpy.prepare("DELETE FROM deb_contents WHERE binary_id=$1 and suite=$2",
+                                  ["int","int"]),
+                                  [TD["old"]["bin"], TD["old"]["suite"]])
+
+    if event == "INSERT" or event == "UPDATE":
+
+       content_data = plpy.execute(plpy.prepare(
+            """SELECT s.section, b.package, b.architecture, c.name, ot.type
+            FROM override o
+            JOIN override_type ot on o.type=ot.id
+            JOIN binaries b on b.package=o.package
+            JOIN files f on b.file=f.id
+            JOIN location l on l.id=f.location
+            JOIN section s on s.id=o.section
+            JOIN component c on c.id=l.component
+            WHERE b.id=$1
+            AND o.suite=$2
+            """,
+            ["int", "int"]),
+            [TD["new"]["bin"], TD["new"]["suite"]])[0]
+
+       component_str = ""
+       if content_data["name"] != "main":
+           component_str = content_data["name"] + "/"
+
+       filenames = plpy.execute(plpy.prepare(
+           "SELECT bc.file FROM bin_contents bc where bc.binary_id=$1",
+           ["int"]),
+           [TD["new"]["bin"]])
+
+       for filename in filenames:
+           plpy.execute(plpy.prepare(
+               """INSERT INTO deb_contents
+                   (filename,section,package,binary_id,arch,suite,component)
+                   VALUES($1,$2,$3,$4,$5,$6,$7)""",
+               ["text","text","text","int","int","int","text"]),
+               [filename["filename"],
+                content_data["section"],
+                content_data["package"],
+                TD["new"]["bin"],
+                content_data["architecture"],
+                TD["new"]["suite"],
+                component_str])
+$$ LANGUAGE plpythonu VOLATILE SECURITY DEFINER;
+""")
+
+
+        c.execute( """CREATE OR REPLACE FUNCTION update_contents_for_override() RETURNS trigger AS  $$
+    event = TD["event"]
+    if event == "UPDATE":
+
+        otype = plpy.execute(plpy.prepare("SELECT type from override_type where id=$1",["int"]),[TD["new"]["type"]])[0]
+        if otype["type"].endswith("deb"):
+            table_name = "%s_contents" % otype["type"]
+            # only update rows belonging to the changed override
+            plpy.execute(plpy.prepare("UPDATE %s SET section=$1 WHERE package=$2 AND suite=$3" % table_name,
+                                      ["text","text","int"]),
+                                      [TD["new"]["section"], TD["new"]["package"], TD["new"]["suite"]])
+
+$$ LANGUAGE plpythonu VOLATILE SECURITY DEFINER;
+""")
+        c.execute( """CREATE TRIGGER bin_associations_contents_trigger
+                      AFTER INSERT OR UPDATE OR DELETE ON bin_associations
+                      FOR EACH ROW EXECUTE PROCEDURE update_contents_for_bin_a();""")
+        c.execute("""CREATE TRIGGER override_contents_trigger
+                      AFTER UPDATE ON override
+                      FOR EACH ROW EXECUTE PROCEDURE update_contents_for_override();""")
+
+        self.db.commit()
+
+    except psycopg2.ProgrammingError, msg:
+        self.db.rollback()
+        raise DBUpdateError, "Unable to apply contents update 19, rollback issued. Error message : %s" % (str(msg))
+
diff --git a/dak/process_accepted.py b/dak/process_accepted.py
index b203f49..7b78f08 100755
--- a/dak/process_accepted.py
+++ b/dak/process_accepted.py
@@ -311,17 +311,24 @@ def add_deb_to_db(u, filename, session):
     for suite_name in u.pkg.changes["distribution"].keys():
         ba = BinAssociation()
         ba.binary_id = bin.binary_id
-        ba.suite_id = get_suite(suite_name).suite_id
+        suite = get_suite(suite_name)
+        ba.suite_id = suite.suite_id
+
+        component_id = bin.poolfile.location.component_id
+
+        contents = copy_temporary_contents(bin, ba, None, session)
+        if not contents:
+            print "REJECT\nCould not determine contents of package %s" % bin.package
+            session.rollback()
+            raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
+
         session.add(ba)
 
     session.flush()
 
-    # Deal with contents - disabled for now
-    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
-    #if not contents:
-    #    print "REJECT\nCould not determine contents of package %s" % bin.package
-    #    session.rollback()
-    #    raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
 
 
 def install(u, session, log_urgency=True):
diff --git a/daklib/binary.py b/daklib/binary.py
index 97e3ec0..8a0cf09 100755
--- a/daklib/binary.py
+++ b/daklib/binary.py
@@ -204,7 +204,10 @@ class Binary(object):
                     else:
                         pkgs = deb822.Packages.iter_paragraphs(file(os.path.join(self.tmpdir,'control')))
                         pkg = pkgs.next()
-                        result = insert_pending_content_paths(pkg, [tarinfo.name for tarinfo in data if not tarinfo.isdir()], session)
+                        result = insert_pending_content_paths(pkg,
+                                                              self.filename.endswith('.udeb'),
+                                                              [tarinfo.name for tarinfo in data if not tarinfo.isdir()],
+                                                              session)
 
                 except:
                     traceback.print_exc()
@@ -256,7 +259,8 @@ class Binary(object):
 
 __all__.append('Binary')
 
-def copy_temporary_contents(package, version, archname, deb, reject, session=None):
+
+def copy_temporary_contents(binary, bin_association, reject, session=None):
     """
     copy the previously stored contents from the temp table to the permanant one
 
@@ -273,20 +277,11 @@ def copy_temporary_contents(package, version, archname, deb, reject, session=Non
 
-    arch = get_architecture(archname, session=session)
 
-    # first see if contents exist:
-    in_pcaq = """SELECT 1 FROM pending_content_associations
-                               WHERE package=:package
-                               AND version=:version
-                               AND architecture=:archid LIMIT 1"""
-
-    vals = {'package': package,
-            'version': version,
-            'archid': arch.arch_id}
-
-    exists = None
-    check = session.execute(in_pcaq, vals)
+    pending = session.query(PendingBinContents).filter_by(package=binary.package,
+                                                          version=binary.version,
+                                                          arch=binary.arch).first()
+    exists = True

-    if check.rowcount > 0:
+    if not pending:
         # This should NOT happen.  We should have added contents
         # during process-unchecked.  if it did, log an error, and send
         # an email.
@@ -300,23 +295,64 @@ def copy_temporary_contents(package, version, archname, deb, reject, session=Non
         message = utils.TemplateSubst(subst, cnf["Dir::Templates"]+"/missing-contents")
         utils.send_mail(message)
 
-        # Temporarily disable contents storage until we re-do the table layout
-        #exists = Binary(deb, reject).scan_package()
-
-    if exists:
-        sql = """INSERT INTO content_associations(binary_pkg,filepath,filename)
-                 SELECT currval('binaries_id_seq'), filepath, filename FROM pending_content_associations
-                 WHERE package=:package AND version=:version AND architecture=:archid"""
-        session.execute(sql, vals)
-
-        sql = """DELETE from pending_content_associations
-                 WHERE package=:package AND version=:version AND architecture=:archid"""
-        session.execute(sql, vals)
-        session.commit()
+        # rescan it now
+        deb = os.path.join(binary.poolfile.location.path, binary.poolfile.filename)
+        exists = Binary(deb, reject).scan_package()
+
+        if not exists:
+            # LOG?
+            return False
+
+    component = binary.poolfile.location.component
+    override = session.query(Override).filter_by(package=binary.package,
+                                                 suite=bin_association.suite,
+                                                 component=component.id).first()
+    if not override:
+        # LOG?
+        return False
+
+
+    if not override.overridetype.type.endswith('deb'):
+        return True
+
+    if override.overridetype.type == "udeb":
+        table = "udeb_contents"
+    elif override.overridetype.type == "deb":
+        table = "deb_contents"
+    else:
+        return False
+
+    if component.name == "main":
+        component_str = ""
+    else:
+        component_str = component.name + "/"
+
+    vals = { 'package':binary.package,
+             'version':binary.version,
+             'arch':binary.arch,
+             'binary_id': binary.binary_id,
+             'suite': bin_association.suite_id,
+             'component':component_str,
+             'section':override.section.section
+             }
+
+    session.execute( """INSERT INTO %s
+    (binary_id,package,version.component,arch,section,filename)
+    SELECT :binary_id, :package, :version, :component, :arch, :section
+    FROM pending_bin_contents pbc
+    WHERE pbc.package=:package
+    AND pbc.version=:version
+    AND pbc.arch=:arch""" % table, vals )
+
+    session.execute( """DELETE from pending_bin_contents package=:package
+    AND version=:version
+    AND arch=:arch""", vals )
 
     if privatetrans:
+        session.commit()
         session.close()
 
     return exists
 
 __all__.append('copy_temporary_contents')
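+# (called from process_accepted.add_deb_to_db once the DBBinary and its
+# BinAssociation for the target suite exist)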
diff --git a/daklib/dbconn.py b/daklib/dbconn.py
index 3c0bc50..34d54b5 100755
--- a/daklib/dbconn.py
+++ b/daklib/dbconn.py
@@ -614,7 +614,7 @@ def insert_content_paths(binary_id, fullpaths, session=None):
         for fullpath in fullpaths:
             if fullpath.startswith( './' ):
                 fullpath = fullpath[2:]
-            
+
             session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )", { 'filename': fullpath, 'id': binary_id}  )
 
         session.commit()
@@ -1167,16 +1167,38 @@ __all__.append('get_override_type')
 
 ################################################################################
 
-class PendingContentAssociation(object):
+class DebContents(object):
     def __init__(self, *args, **kwargs):
         pass
 
     def __repr__(self):
-        return '<PendingContentAssociation %s>' % self.pca_id
+        return '<DebContents %s: %s>' % (self.package, self.filename)
+
+__all__.append('DebContents')
+
+
+class UdebContents(object):
+    def __init__(self, *args, **kwargs):
+        pass
 
-__all__.append('PendingContentAssociation')
+    def __repr__(self):
+        return '<UdebContents %s: %s>' % (self.package, self.filename)
+
+__all__.append('UdebContents')
+
+class PendingBinContents(object):
+    def __init__(self, *args, **kwargs):
+        pass
 
-def insert_pending_content_paths(package, fullpaths, session=None):
+    def __repr__(self):
+        return '<PendingBinContents %s>' % self.contents_id
+
+__all__.append('PendingBinContents')
+
+def insert_pending_content_paths(package,
+                                 is_udeb,
+                                 fullpaths,
+                                 session=None):
     """
     Make sure given paths are temporarily associated with given
     package
@@ -1205,32 +1227,27 @@ def insert_pending_content_paths(package, fullpaths, session=None):
         arch_id = arch.arch_id
 
         # Remove any already existing recorded files for this package
-        q = session.query(PendingContentAssociation)
+        q = session.query(PendingBinContents)
         q = q.filter_by(package=package['Package'])
         q = q.filter_by(version=package['Version'])
-        q = q.filter_by(architecture=arch_id)
+        q = q.filter_by(arch=arch_id)
         q.delete()
 
-        # Insert paths
-        pathcache = {}
         for fullpath in fullpaths:
-            (path, file) = os.path.split(fullpath)
 
-            if path.startswith( "./" ):
-                path = path[2:]
-
-            filepath_id = get_or_set_contents_path_id(path, session)
-            filename_id = get_or_set_contents_file_id(file, session)
-
-            pathcache[fullpath] = (filepath_id, filename_id)
+            if fullpath.startswith( "./" ):
+                fullpath = fullpath[2:]
 
-        for fullpath, dat in pathcache.items():
-            pca = PendingContentAssociation()
+            pca = PendingBinContents()
             pca.package = package['Package']
             pca.version = package['Version']
-            pca.filepath_id = dat[0]
-            pca.filename_id = dat[1]
+            pca.filename = fullpath
-            pca.architecture = arch_id
+            pca.arch = arch_id
+
+            if is_udeb:
+                pca.otype = 8 # gross: hardcoded udeb override type id
+            else:
+                pca.otype = 7 # also gross: hardcoded deb override type id
             session.add(pca)
 
         # Only commit if we set up the session ourself
@@ -2072,6 +2089,7 @@ class DBConn(Singleton):
     def __setuptables(self):
         self.tbl_architecture = Table('architecture', self.db_meta, autoload=True)
         self.tbl_archive = Table('archive', self.db_meta, autoload=True)
+        self.tbl_bin_contents = Table('bin_contents', self.db_meta, autoload=True)
         self.tbl_bin_associations = Table('bin_associations', self.db_meta, autoload=True)
         self.tbl_binaries = Table('binaries', self.db_meta, autoload=True)
         self.tbl_component = Table('component', self.db_meta, autoload=True)
@@ -2080,6 +2098,7 @@ class DBConn(Singleton):
         self.tbl_content_file_names = Table('content_file_names', self.db_meta, autoload=True)
         self.tbl_content_file_paths = Table('content_file_paths', self.db_meta, autoload=True)
         self.tbl_dsc_files = Table('dsc_files', self.db_meta, autoload=True)
+        self.tbl_deb_contents = Table('deb_contents', self.db_meta, autoload=True)
+        self.tbl_udeb_contents = Table('udeb_contents', self.db_meta, autoload=True)
         self.tbl_files = Table('files', self.db_meta, autoload=True)
         self.tbl_fingerprint = Table('fingerprint', self.db_meta, autoload=True)
         self.tbl_keyrings = Table('keyrings', self.db_meta, autoload=True)
@@ -2088,7 +2107,7 @@ class DBConn(Singleton):
         self.tbl_new_comments = Table('new_comments', self.db_meta, autoload=True)
         self.tbl_override = Table('override', self.db_meta, autoload=True)
         self.tbl_override_type = Table('override_type', self.db_meta, autoload=True)
-        self.tbl_pending_content_associations = Table('pending_content_associations', self.db_meta, autoload=True)
+        self.tbl_pending_bin_contents = Table('pending_bin_contents', self.db_meta, autoload=True)
         self.tbl_priority = Table('priority', self.db_meta, autoload=True)
         self.tbl_queue = Table('queue', self.db_meta, autoload=True)
         self.tbl_queue_build = Table('queue_build', self.db_meta, autoload=True)
@@ -2117,6 +2136,29 @@ class DBConn(Singleton):
                                  binary_id = self.tbl_bin_associations.c.bin,
                                  binary = relation(DBBinary)))
 
+        mapper(PendingBinContents, self.tbl_pending_bin_contents,
+               properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
+                                 filename = self.tbl_pending_bin_contents.c.filename,
+                                 package = self.tbl_pending_bin_contents.c.package,
+                                 version = self.tbl_pending_bin_contents.c.version,
+                                 arch = self.tbl_pending_bin_contents.c.arch,
+                                 otype = self.tbl_pending_bin_contents.c.type))
+
+        mapper(DebContents, self.tbl_deb_contents,
+               properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
+                                 package=self.tbl_deb_contents.c.package,
+                                 component=self.tbl_deb_contents.c.component,
+                                 arch=self.tbl_deb_contents.c.arch,
+                                 section=self.tbl_deb_contents.c.section,
+                                 filename=self.tbl_deb_contents.c.filename))
+
+        mapper(UdebContents, self.tbl_udeb_contents,
+               properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
+                                 package=self.tbl_udeb_contents.c.package,
+                                 component=self.tbl_udeb_contents.c.component,
+                                 arch=self.tbl_udeb_contents.c.arch,
+                                 section=self.tbl_udeb_contents.c.section,
+                                 filename=self.tbl_udeb_contents.c.filename))
 
         mapper(DBBinary, self.tbl_binaries,
                properties = dict(binary_id = self.tbl_binaries.c.id,
@@ -2210,7 +2252,8 @@ class DBConn(Singleton):
                                  queue = relation(Queue, backref='queuebuild')))
 
         mapper(Section, self.tbl_section,
-               properties = dict(section_id = self.tbl_section.c.id))
+               properties = dict(section_id = self.tbl_section.c.id,
+                                 section=self.tbl_section.c.section))
 
         mapper(DBSource, self.tbl_source,
                properties = dict(source_id = self.tbl_source.c.id,
-- 
1.6.5


