
[dak/master] pychecker 2



Fix a whole bunch more pychecker warnings. (Thanks to the bicyclerepair refactoring tool, those were the easy ones.)

Signed-off-by: Joerg Jaspert <joerg@debian.org>
---
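Nearly all of the renames below silence one class of pychecker warning:
locals named file, id, type or list shadow the Python builtin of the same
name. A minimal sketch of the problem and of the rename fix, for
illustration only (not code from dak):

    # Before: pychecker flags each of these locals as shadowing a builtin
    def describe(paths):
        for file in paths:
            id = hash(file)
            type = "plain"
            print "%s %s %s" % (file, id, type)

    # After: the renamed locals behave identically, the warnings go away,
    # and the builtins file(), id() and type() remain usable in the loop
    def describe_renamed(paths):
        for fname in paths:
            fid = hash(fname)
            ftype = "plain"
            print "%s %s %s" % (fname, fid, ftype)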
 dak/generate_releases.py    |   24 ++++----
 dak/import_keyring.py       |   46 ++++++++--------
 dak/make_maintainers.py     |    6 +-
 dak/make_suite_file_list.py |   59 ++++++++++----------
 dak/override.py             |    6 +-
 dak/process_accepted.py     |  128 +++++++++++++++++++++---------------------
 dak/process_new.py          |    2 +
 dak/update_db.py            |    1 +
 8 files changed, 137 insertions(+), 135 deletions(-)

diff --git a/dak/generate_releases.py b/dak/generate_releases.py
index 8d0a3d8..a155245 100755
--- a/dak/generate_releases.py
+++ b/dak/generate_releases.py
@@ -235,8 +235,8 @@ def main ():
                 for arch in AptCnf["tree::%s::Architectures" % (tree)].split():
                     if arch == "source":
                         filepath = "%s/%s/Sources" % (sec, arch)
-                        for file in compressnames("tree::%s" % (tree), "Sources", filepath):
-                            files.append(file)
+                        for cfile in compressnames("tree::%s" % (tree), "Sources", filepath):
+                            files.append(cfile)
                         add_tiffani(files, Cnf["Dir::Root"] + tree, filepath)
                     else:
                         disks = "%s/disks-%s" % (sec, arch)
@@ -247,8 +247,8 @@ def main ():
                                     files.append("%s/%s/md5sum.txt" % (disks, dir))
 
                         filepath = "%s/binary-%s/Packages" % (sec, arch)
-                        for file in compressnames("tree::%s" % (tree), "Packages", filepath):
-                            files.append(file)
+                        for cfile in compressnames("tree::%s" % (tree), "Packages", filepath):
+                            files.append(cfile)
                         add_tiffani(files, Cnf["Dir::Root"] + tree, filepath)
 
                     if arch == "source":
@@ -290,10 +290,10 @@ def main ():
 
                     for arch in AptCnf["tree::%s/%s::Architectures" % (tree,dis)].split():
                         if arch != "source":  # always true
-                            for file in compressnames("tree::%s/%s" % (tree,dis),
+                            for cfile in compressnames("tree::%s/%s" % (tree,dis),
                                 "Packages",
                                 "%s/%s/binary-%s/Packages" % (dis, sec, arch)):
-                                files.append(file)
+                                files.append(cfile)
             elif AptCnf.has_key("tree::%s::FakeDI" % (tree)):
                 usetree = AptCnf["tree::%s::FakeDI" % (tree)]
                 sec = AptCnf["tree::%s/main::Sections" % (usetree)].split()[0]
@@ -302,14 +302,14 @@ def main ():
 
                 for arch in AptCnf["tree::%s/main::Architectures" % (usetree)].split():
                     if arch != "source":  # always true
-                        for file in compressnames("tree::%s/main" % (usetree), "Packages", "main/%s/binary-%s/Packages" % (sec, arch)):
-                            files.append(file)
+                        for cfile in compressnames("tree::%s/main" % (usetree), "Packages", "main/%s/binary-%s/Packages" % (sec, arch)):
+                            files.append(cfile)
 
         elif AptCnf.has_key("bindirectory::%s" % (tree)):
-            for file in compressnames("bindirectory::%s" % (tree), "Packages", AptCnf["bindirectory::%s::Packages" % (tree)]):
-                files.append(file.replace(tree+"/","",1))
-            for file in compressnames("bindirectory::%s" % (tree), "Sources", AptCnf["bindirectory::%s::Sources" % (tree)]):
-                files.append(file.replace(tree+"/","",1))
+            for cfile in compressnames("bindirectory::%s" % (tree), "Packages", AptCnf["bindirectory::%s::Packages" % (tree)]):
+                files.append(cfile.replace(tree+"/","",1))
+            for cfile in compressnames("bindirectory::%s" % (tree), "Sources", AptCnf["bindirectory::%s::Sources" % (tree)]):
+                files.append(cfile.replace(tree+"/","",1))
         else:
             print "ALERT: no tree/bindirectory for %s" % (tree)
 
diff --git a/dak/import_keyring.py b/dak/import_keyring.py
index 602eb37..c8013d6 100755
--- a/dak/import_keyring.py
+++ b/dak/import_keyring.py
@@ -35,9 +35,9 @@ def get_uid_info():
     byname = {}
     byid = {}
     q = projectB.query("SELECT id, uid, name FROM uid")
-    for (id, uid, name) in q.getresult():
-        byname[uid] = (id, name)
-        byid[id] = (uid, name)
+    for (keyid, uid, name) in q.getresult():
+        byname[uid] = (keyid, name)
+        byid[keyid] = (uid, name)
     return (byname, byid)
 
 def get_fingerprint_info():
@@ -131,16 +131,16 @@ class Keyring:
             uid = entry["uid"][0]
             name = get_ldap_name(entry)
             fingerprints = entry["keyFingerPrint"]
-            id = None
+            keyid = None
             for f in fingerprints:
                 key = fpr_lookup.get(f, None)
                 if key not in keys: continue
                 keys[key]["uid"] = uid
 
-                if id != None: continue
-                id = database.get_or_set_uid_id(uid)
-                byuid[id] = (uid, name)
-                byname[uid] = (id, name)
+                if keyid != None: continue
+                keyid = database.get_or_set_uid_id(uid)
+                byuid[keyid] = (uid, name)
+                byname[uid] = (keyid, name)
 
         return (byname, byuid)
 
@@ -155,15 +155,15 @@ class Keyring:
                 keys[x]["uid"] = format % "invalid-uid"
             else:
                 uid = format % keys[x]["email"]
-                id = database.get_or_set_uid_id(uid)
-                byuid[id] = (uid, keys[x]["name"])
-                byname[uid] = (id, keys[x]["name"])
+                keyid = database.get_or_set_uid_id(uid)
+                byuid[keyid] = (uid, keys[x]["name"])
+                byname[uid] = (keyid, keys[x]["name"])
                 keys[x]["uid"] = uid
         if any_invalid:
             uid = format % "invalid-uid"
-            id = database.get_or_set_uid_id(uid)
-            byuid[id] = (uid, "ungeneratable user id")
-            byname[uid] = (id, "ungeneratable user id")
+            keyid = database.get_or_set_uid_id(uid)
+            byuid[keyid] = (uid, "ungeneratable user id")
+            byname[uid] = (keyid, "ungeneratable user id")
         return (byname, byuid)
 
 ################################################################################
@@ -237,14 +237,14 @@ def main():
     (db_uid_byname, db_uid_byid) = get_uid_info()
 
     ### Update full names of applicable users
-    for id in desuid_byid.keys():
-        uid = (id, desuid_byid[id][0])
-        name = desuid_byid[id][1]
-        oname = db_uid_byid[id][1]
+    for keyid in desuid_byid.keys():
+        uid = (keyid, desuid_byid[keyid][0])
+        name = desuid_byid[keyid][1]
+        oname = db_uid_byid[keyid][1]
         if name and oname != name:
             changes.append((uid[1], "Full name: %s" % (name)))
             projectB.query("UPDATE uid SET name = '%s' WHERE id = %s" %
-                (pg.escape_string(name), id))
+                (pg.escape_string(name), keyid))
 
     # The fingerprint table (fpr) points to a uid and a keyring.
     #   If the uid is being decided here (ldap/generate) we set it to it.
@@ -254,11 +254,11 @@ def main():
 
     fpr = {}
     for z in keyring.keys.keys():
-        id = db_uid_byname.get(keyring.keys[z].get("uid", None), [None])[0]
-        if id == None:
-            id = db_fin_info.get(keyring.keys[z]["fingerprints"][0], [None])[0]
+        keyid = db_uid_byname.get(keyring.keys[z].get("uid", None), [None])[0]
+        if keyid == None:
+            keyid = db_fin_info.get(keyring.keys[z]["fingerprints"][0], [None])[0]
         for y in keyring.keys[z]["fingerprints"]:
-            fpr[y] = (id,keyring_id)
+            fpr[y] = (keyid,keyring_id)
 
     # For any keys that used to be in this keyring, disassociate them.
     # We don't change the uid, leaving that for historical info; if
diff --git a/dak/make_maintainers.py b/dak/make_maintainers.py
index 090b8d4..679ed22 100755
--- a/dak/make_maintainers.py
+++ b/dak/make_maintainers.py
@@ -130,8 +130,8 @@ def main():
 
     # Process any additional Maintainer files (e.g. from pseudo packages)
     for filename in extra_files:
-        file = utils.open_file(filename)
-        for line in file.readlines():
+        extrafile = utils.open_file(filename)
+        for line in extrafile.readlines():
             line = utils.re_comments.sub('', line).strip()
             if line == "":
                 continue
@@ -147,7 +147,7 @@ def main():
             if not packages.has_key(package) or version == '*' \
                or apt_pkg.VersionCompare(packages[package]["version"], version) < 0:
                 packages[package] = { "maintainer": maintainer, "version": version }
-        file.close()
+        extrafile.close()
 
     package_keys = packages.keys()
     package_keys.sort()
diff --git a/dak/make_suite_file_list.py b/dak/make_suite_file_list.py
index 41e6cb3..dbbab7e 100755
--- a/dak/make_suite_file_list.py
+++ b/dak/make_suite_file_list.py
@@ -201,10 +201,10 @@ def write_legacy_mixed_filelist(suite, list, packages, dislocated_files):
     output = utils.open_file(filename, "w")
     # Generate the final list of files
     files = {}
-    for id in list:
-        path = packages[id]["path"]
-        filename = packages[id]["filename"]
-        file_id = packages[id]["file_id"]
+    for fileid in list:
+        path = packages[fileid]["path"]
+        filename = packages[fileid]["filename"]
+        file_id = packages[fileid]["file_id"]
         if suite == "stable" and dislocated_files.has_key(file_id):
             filename = dislocated_files[file_id]
         else:
@@ -217,8 +217,8 @@ def write_legacy_mixed_filelist(suite, list, packages, dislocated_files):
     keys = files.keys()
     keys.sort()
     # Write the list of files out
-    for file in keys:
-        output.write(file+'\n')
+    for outfile in keys:
+        output.write(outfile+'\n')
     output.close()
 
 ############################################################
@@ -234,11 +234,11 @@ def write_filelist(suite, component, arch, type, list, packages, dislocated_file
     output = utils.open_file(filename, "w")
     # Generate the final list of files
     files = {}
-    for id in list:
-        path = packages[id]["path"]
-        filename = packages[id]["filename"]
-        file_id = packages[id]["file_id"]
-        pkg = packages[id]["pkg"]
+    for fileid in list:
+        path = packages[fileid]["path"]
+        filename = packages[fileid]["filename"]
+        file_id = packages[fileid]["file_id"]
+        pkg = packages[fileid]["pkg"]
         if suite == "stable" and dislocated_files.has_key(file_id):
             filename = dislocated_files[file_id]
         else:
@@ -264,12 +264,12 @@ def write_filelists(packages, dislocated_files):
         suite = packages[unique_id]["suite"]
         component = packages[unique_id]["component"]
         arch = packages[unique_id]["arch"]
-        type = packages[unique_id]["type"]
+        packagetype = packages[unique_id]["type"]
         d.setdefault(suite, {})
         d[suite].setdefault(component, {})
         d[suite][component].setdefault(arch, {})
-        d[suite][component][arch].setdefault(type, [])
-        d[suite][component][arch][type].append(unique_id)
+        d[suite][component][arch].setdefault(packagetype, [])
+        d[suite][component][arch][packagetype].append(unique_id)
     # Flesh out the index
     if not Options["Suite"]:
         suites = Cnf.SubTree("Suite").List()
@@ -282,7 +282,6 @@ def write_filelists(packages, dislocated_files):
         else:
             components = utils.split_args(Options["Component"])
         udeb_components = Cnf.ValueList("Suite::%s::UdebComponents" % (suite))
-        udeb_components = udeb_components
         for component in components:
             d[suite].setdefault(component, {})
             if component in udeb_components:
@@ -299,8 +298,8 @@ def write_filelists(packages, dislocated_files):
                     types = [ "dsc" ]
                 else:
                     types = binary_types
-                for type in types:
-                    d[suite][component][arch].setdefault(type, [])
+                for packagetype in types:
+                    d[suite][component][arch].setdefault(packagetype, [])
     # Then walk it
     for suite in d.keys():
         if Cnf.has_key("Suite::%s::Components" % (suite)):
@@ -308,25 +307,25 @@ def write_filelists(packages, dislocated_files):
                 for arch in d[suite][component].keys():
                     if arch == "all":
                         continue
-                    for type in d[suite][component][arch].keys():
-                        list = d[suite][component][arch][type]
+                    for packagetype in d[suite][component][arch].keys():
+                        filelist = d[suite][component][arch][packagetype]
                         # If it's a binary, we need to add in the arch: all debs too
                         if arch != "source":
                             archall_suite = Cnf.get("Make-Suite-File-List::ArchAllMap::%s" % (suite))
                             if archall_suite:
-                                list.extend(d[archall_suite][component]["all"][type])
+                                filelist.extend(d[archall_suite][component]["all"][packagetype])
                             elif d[suite][component].has_key("all") and \
-                                     d[suite][component]["all"].has_key(type):
-                                list.extend(d[suite][component]["all"][type])
-                        write_filelist(suite, component, arch, type, list,
+                                     d[suite][component]["all"].has_key(packagetype):
+                                filelist.extend(d[suite][component]["all"][packagetype])
+                        write_filelist(suite, component, arch, packagetype, filelist,
                                        packages, dislocated_files)
         else: # legacy-mixed suite
-            list = []
+            filelist = []
             for component in d[suite].keys():
                 for arch in d[suite][component].keys():
-                    for type in d[suite][component][arch].keys():
-                        list.extend(d[suite][component][arch][type])
-            write_legacy_mixed_filelist(suite, list, packages, dislocated_files)
+                    for packagetype in d[suite][component][arch].keys():
+                        filelist.extend(d[suite][component][arch][packagetype])
+            write_legacy_mixed_filelist(suite, filelist, packages, dislocated_files)
 
 ################################################################################
 
@@ -369,13 +368,13 @@ SELECT s.id, s.source, 'source', s.version, l.path, f.filename, c.name, f.id,
     packages = {}
     unique_id = 0
     for i in ql:
-        (id, pkg, arch, version, path, filename, component, file_id, suite, type) = i
+        (sourceid, pkg, arch, version, path, filename, component, file_id, suite, filetype) = i
         # 'id' comes from either 'binaries' or 'source', so it's not unique
         unique_id += 1
-        packages[unique_id] = Dict(id=id, pkg=pkg, arch=arch, version=version,
+        packages[unique_id] = Dict(sourceid=sourceid, pkg=pkg, arch=arch, version=version,
                                    path=path, filename=filename,
                                    component=component, file_id=file_id,
-                                   suite=suite, type = type)
+                                   suite=suite, filetype = filetype)
     cleanup(packages)
     write_filelists(packages, dislocated_files)
 
diff --git a/dak/override.py b/dak/override.py
index f98f03c..0bda5e7 100755
--- a/dak/override.py
+++ b/dak/override.py
@@ -107,9 +107,9 @@ def main ():
 
     # Retrieve current section/priority...
     oldsection, oldsourcesection, oldpriority = None, None, None
-    for type in ['source', 'binary']:
+    for packagetype in ['source', 'binary']:
         eqdsc = '!='
-        if type == 'source':
+        if packagetype == 'source':
             eqdsc = '='
         q = projectB.query("""
     SELECT priority.priority AS prio, section.section AS sect, override_type.type AS type
@@ -129,7 +129,7 @@ def main ():
             utils.fubar("%s is ambiguous. Matches %d packages" % (package,q.ntuples()))
 
         r = q.getresult()
-        if type == 'binary':
+        if packagetype == 'binary':
             oldsection = r[0][1]
             oldpriority = r[0][0]
         else:
diff --git a/dak/process_accepted.py b/dak/process_accepted.py
index ea238ef..a25c391 100755
--- a/dak/process_accepted.py
+++ b/dak/process_accepted.py
@@ -109,32 +109,32 @@ def reject (str, prefix="Rejected: "):
 def check():
     propogate={}
     nopropogate={}
-    for file in files.keys():
+    for checkfile in files.keys():
         # The .orig.tar.gz can disappear out from under us if it's a
         # duplicate of one in the archive.
-        if not files.has_key(file):
+        if not files.has_key(checkfile):
             continue
         # Check that the source still exists
-        if files[file]["type"] == "deb":
-            source_version = files[file]["source version"]
-            source_package = files[file]["source package"]
+        if files[checkfile]["type"] == "deb":
+            source_version = files[checkfile]["source version"]
+            source_package = files[checkfile]["source package"]
             if not changes["architecture"].has_key("source") \
                and not Upload.source_exists(source_package, source_version,  changes["distribution"].keys()):
-                reject("no source found for %s %s (%s)." % (source_package, source_version, file))
+                reject("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
 
         # Version and file overwrite checks
         if not installing_to_stable:
-            if files[file]["type"] == "deb":
-                reject(Upload.check_binary_against_db(file), "")
-            elif files[file]["type"] == "dsc":
-                reject(Upload.check_source_against_db(file), "")
-                (reject_msg, is_in_incoming) = Upload.check_dsc_against_db(file)
+            if files[checkfile]["type"] == "deb":
+                reject(Upload.check_binary_against_db(checkfile), "")
+            elif files[checkfile]["type"] == "dsc":
+                reject(Upload.check_source_against_db(checkfile), "")
+                (reject_msg, is_in_incoming) = Upload.check_dsc_against_db(checkfile)
                 reject(reject_msg, "")
 
         # propogate in the case it is in the override tables:
         if changes.has_key("propdistribution"):
             for suite in changes["propdistribution"].keys():
-                if Upload.in_override_p(files[file]["package"], files[file]["component"], suite, files[file].get("dbtype",""), file):
+                if Upload.in_override_p(files[checkfile]["package"], files[checkfile]["component"], suite, files[checkfile].get("dbtype",""), checkfile):
                     propogate[suite] = 1
                 else:
                     nopropogate[suite] = 1
@@ -144,11 +144,11 @@ def check():
             continue
         changes["distribution"][suite] = 1
 
-    for file in files.keys():
+    for checkfile in files.keys():
         # Check the package is still in the override tables
         for suite in changes["distribution"].keys():
-            if not Upload.in_override_p(files[file]["package"], files[file]["component"], suite, files[file].get("dbtype",""), file):
-                reject("%s is NEW for %s." % (file, suite))
+            if not Upload.in_override_p(files[checkfile]["package"], files[checkfile]["component"], suite, files[checkfile].get("dbtype",""), checkfile):
+                reject("%s is NEW for %s." % (checkfile, suite))
 
 ###############################################################################
 
@@ -283,8 +283,8 @@ def install ():
         return
 
     # Add the .dsc file to the DB
-    for file in files.keys():
-        if files[file]["type"] == "dsc":
+    for newfile in files.keys():
+        if files[newfile]["type"] == "dsc":
             package = dsc["source"]
             version = dsc["version"]  # NB: not files[file]["version"], that has no epoch
             maintainer = dsc["maintainer"]
@@ -295,26 +295,26 @@ def install ():
             changedby_id = database.get_or_set_maintainer_id(changedby)
             fingerprint_id = database.get_or_set_fingerprint_id(dsc["fingerprint"])
             install_date = time.strftime("%Y-%m-%d")
-            filename = files[file]["pool name"] + file
-            dsc_component = files[file]["component"]
-            dsc_location_id = files[file]["location id"]
+            filename = files[newfile]["pool name"] + newfile
+            dsc_component = files[newfile]["component"]
+            dsc_location_id = files[newfile]["location id"]
             if dsc.has_key("dm-upload-allowed") and  dsc["dm-upload-allowed"] == "yes":
                 dm_upload_allowed = "true"
             else:
                 dm_upload_allowed = "false"
-            if not files[file].has_key("files id") or not files[file]["files id"]:
-                files[file]["files id"] = database.set_files_id (filename, files[file]["size"], files[file]["md5sum"], files[file]["sha1sum"], files[file]["sha256sum"], dsc_location_id)
+            if not files[newfile].has_key("files id") or not files[newfile]["files id"]:
+                files[newfile]["files id"] = database.set_files_id (filename, files[newfile]["size"], files[newfile]["md5sum"], files[newfile]["sha1sum"], files[newfile]["sha256sum"], dsc_location_id)
             projectB.query("INSERT INTO source (source, version, maintainer, changedby, file, install_date, sig_fpr, dm_upload_allowed) VALUES ('%s', '%s', %d, %d, %d, '%s', %s, %s)"
-                           % (package, version, maintainer_id, changedby_id, files[file]["files id"], install_date, fingerprint_id, dm_upload_allowed))
+                           % (package, version, maintainer_id, changedby_id, files[newfile]["files id"], install_date, fingerprint_id, dm_upload_allowed))
 
             for suite in changes["distribution"].keys():
                 suite_id = database.get_suite_id(suite)
                 projectB.query("INSERT INTO src_associations (suite, source) VALUES (%d, currval('source_id_seq'))" % (suite_id))
 
             # Add the source files to the DB (files and dsc_files)
-            projectB.query("INSERT INTO dsc_files (source, file) VALUES (currval('source_id_seq'), %d)" % (files[file]["files id"]))
+            projectB.query("INSERT INTO dsc_files (source, file) VALUES (currval('source_id_seq'), %d)" % (files[newfile]["files id"]))
             for dsc_file in dsc_files.keys():
-                filename = files[file]["pool name"] + dsc_file
+                filename = files[newfile]["pool name"] + dsc_file
                 # If the .orig.tar.gz is already in the pool, its
                 # files id is stored in dsc_files by check_dsc().
                 files_id = dsc_files[dsc_file].get("files id", None)
@@ -343,30 +343,30 @@ def install ():
 
 
     # Add the .deb files to the DB
-    for file in files.keys():
-        if files[file]["type"] == "deb":
-            package = files[file]["package"]
-            version = files[file]["version"]
-            maintainer = files[file]["maintainer"]
+    for newfile in files.keys():
+        if files[newfile]["type"] == "deb":
+            package = files[newfile]["package"]
+            version = files[newfile]["version"]
+            maintainer = files[newfile]["maintainer"]
             maintainer = maintainer.replace("'", "\\'")
             maintainer_id = database.get_or_set_maintainer_id(maintainer)
             fingerprint_id = database.get_or_set_fingerprint_id(changes["fingerprint"])
-            architecture = files[file]["architecture"]
+            architecture = files[newfile]["architecture"]
             architecture_id = database.get_architecture_id (architecture)
-            type = files[file]["dbtype"]
-            source = files[file]["source package"]
-            source_version = files[file]["source version"]
-            filename = files[file]["pool name"] + file
-            if not files[file].has_key("location id") or not files[file]["location id"]:
-                files[file]["location id"] = database.get_location_id(Cnf["Dir::Pool"],files[file]["component"],utils.where_am_i())
-            if not files[file].has_key("files id") or not files[file]["files id"]:
-                files[file]["files id"] = database.set_files_id (filename, files[file]["size"], files[file]["md5sum"], files[file]["sha1sum"], files[file]["sha256sum"], files[file]["location id"])
+            filetype = files[newfile]["dbtype"]
+            source = files[newfile]["source package"]
+            source_version = files[newfile]["source version"]
+            filename = files[newfile]["pool name"] + newfile
+            if not files[newfile].has_key("location id") or not files[newfile]["location id"]:
+                files[newfile]["location id"] = database.get_location_id(Cnf["Dir::Pool"],files[newfile]["component"],utils.where_am_i())
+            if not files[newfile].has_key("files id") or not files[newfile]["files id"]:
+                files[newfile]["files id"] = database.set_files_id (filename, files[newfile]["size"], files[newfile]["md5sum"], files[newfile]["sha1sum"], files[newfile]["sha256sum"], files[newfile]["location id"])
             source_id = database.get_source_id (source, source_version)
             if source_id:
                 projectB.query("INSERT INTO binaries (package, version, maintainer, source, architecture, file, type, sig_fpr) VALUES ('%s', '%s', %d, %d, %d, %d, '%s', %d)"
-                               % (package, version, maintainer_id, source_id, architecture_id, files[file]["files id"], type, fingerprint_id))
+                               % (package, version, maintainer_id, source_id, architecture_id, files[newfile]["files id"], filetype, fingerprint_id))
             else:
-                raise NoSourceFieldError, "Unable to find a source id for %s (%s), %s, file %s, type %s, signed by %s" % (package, version, architecture, file, type, sig_fpr)
+                raise NoSourceFieldError, "Unable to find a source id for %s (%s), %s, file %s, type %s, signed by %s" % (package, version, architecture, newfile, filetype, changes["fingerprint"])
             for suite in changes["distribution"].keys():
                 suite_id = database.get_suite_id(suite)
                 projectB.query("INSERT INTO bin_associations (suite, bin) VALUES (%d, currval('binaries_id_seq'))" % (suite_id))
@@ -386,7 +386,7 @@ def install ():
                 continue
             # First move the files to the new location
             legacy_filename = qid["path"] + qid["filename"]
-            pool_location = utils.poolify (changes["source"], files[file]["component"])
+            pool_location = utils.poolify (changes["source"], files[newfile]["component"])
             pool_filename = pool_location + os.path.basename(qid["filename"])
             destination = Cnf["Dir::Pool"] + pool_location
             utils.move(legacy_filename, destination)
@@ -414,11 +414,11 @@ def install ():
             projectB.query("UPDATE dsc_files SET file = %s WHERE source = %s AND file = %s" % (new_files_id, database.get_source_id(changes["source"], changes["version"]), orig_tar_id))
 
     # Install the files into the pool
-    for file in files.keys():
-        destination = Cnf["Dir::Pool"] + files[file]["pool name"] + file
-        utils.move(file, destination)
-        Logger.log(["installed", file, files[file]["type"], files[file]["size"], files[file]["architecture"]])
-        install_bytes += float(files[file]["size"])
+    for newfile in files.keys():
+        destination = Cnf["Dir::Pool"] + files[newfile]["pool name"] + newfile
+        utils.move(newfile, destination)
+        Logger.log(["installed", newfile, files[newfile]["type"], files[newfile]["size"], files[newfile]["architecture"]])
+        install_bytes += float(files[newfile]["size"])
 
     # Copy the .changes file across for suite which need it.
     copy_changes = {}
@@ -457,14 +457,14 @@ def install ():
         dest_dir = Cnf["Dir::QueueBuild"]
         if Cnf.FindB("Dinstall::SecurityQueueBuild"):
             dest_dir = os.path.join(dest_dir, suite)
-        for file in files.keys():
-            dest = os.path.join(dest_dir, file)
+        for newfile in files.keys():
+            dest = os.path.join(dest_dir, newfile)
             # Remove it from the list of packages for later processing by apt-ftparchive
             projectB.query("UPDATE queue_build SET in_queue = 'f', last_used = '%s' WHERE filename = '%s' AND suite = %s" % (now_date, dest, suite_id))
             if not Cnf.FindB("Dinstall::SecurityQueueBuild"):
                 # Update the symlink to point to the new location in the pool
-                pool_location = utils.poolify (changes["source"], files[file]["component"])
-                src = os.path.join(Cnf["Dir::Pool"], pool_location, os.path.basename(file))
+                pool_location = utils.poolify (changes["source"], files[newfile]["component"])
+                src = os.path.join(Cnf["Dir::Pool"], pool_location, os.path.basename(newfile))
                 if os.path.islink(dest):
                     os.unlink(dest)
                 os.symlink(src, dest)
@@ -493,8 +493,8 @@ def stable_install (summary, short_summary):
     projectB.query("BEGIN WORK")
 
     # Add the source to stable (and remove it from proposed-updates)
-    for file in files.keys():
-        if files[file]["type"] == "dsc":
+    for newfile in files.keys():
+        if files[newfile]["type"] == "dsc":
             package = dsc["source"]
             version = dsc["version"];  # NB: not files[file]["version"], that has no epoch
             q = projectB.query("SELECT id FROM source WHERE source = '%s' AND version = '%s'" % (package, version))
@@ -508,11 +508,11 @@ def stable_install (summary, short_summary):
             projectB.query("INSERT INTO src_associations (suite, source) VALUES ('%s', '%s')" % (suite_id, source_id))
 
     # Add the binaries to stable (and remove it/them from proposed-updates)
-    for file in files.keys():
-        if files[file]["type"] == "deb":
-            package = files[file]["package"]
-            version = files[file]["version"]
-            architecture = files[file]["architecture"]
+    for newfile in files.keys():
+        if files[newfile]["type"] == "deb":
+            package = files[newfile]["package"]
+            version = files[newfile]["version"]
+            architecture = files[newfile]["architecture"]
             q = projectB.query("SELECT b.id FROM binaries b, architecture a WHERE b.package = '%s' AND b.version = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all') AND b.architecture = a.id" % (package, version, architecture))
             ql = q.getresult()
             if not ql:
@@ -535,13 +535,13 @@ def stable_install (summary, short_summary):
         os.unlink (new_changelog_filename)
 
     new_changelog = utils.open_file(new_changelog_filename, 'w')
-    for file in files.keys():
-        if files[file]["type"] == "deb":
-            new_changelog.write("stable/%s/binary-%s/%s\n" % (files[file]["component"], files[file]["architecture"], file))
-        elif utils.re_issource.match(file):
-            new_changelog.write("stable/%s/source/%s\n" % (files[file]["component"], file))
+    for newfile in files.keys():
+        if files[newfile]["type"] == "deb":
+            new_changelog.write("stable/%s/binary-%s/%s\n" % (files[newfile]["component"], files[newfile]["architecture"], newfile))
+        elif utils.re_issource.match(newfile):
+            new_changelog.write("stable/%s/source/%s\n" % (files[newfile]["component"], newfile))
         else:
-            new_changelog.write("%s\n" % (file))
+            new_changelog.write("%s\n" % (newfile))
     chop_changes = queue.re_fdnic.sub("\n", changes["changes"])
     new_changelog.write(chop_changes + '\n\n')
     if os.access(changelog_filename, os.R_OK) != 0:
diff --git a/dak/process_new.py b/dak/process_new.py
index 8a43f12..d742ac2 100755
--- a/dak/process_new.py
+++ b/dak/process_new.py
@@ -230,6 +230,7 @@ def sort_changes(changes_files):
 class Section_Completer:
     def __init__ (self):
         self.sections = []
+        self.matches = []
         q = projectB.query("SELECT section FROM section")
         for i in q.getresult():
             self.sections.append(i[0])
@@ -251,6 +252,7 @@ class Section_Completer:
 class Priority_Completer:
     def __init__ (self):
         self.priorities = []
+        self.matches = []
         q = projectB.query("SELECT priority FROM priority")
         for i in q.getresult():
             self.priorities.append(i[0])
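
Aside: the two process_new.py hunks above fix a different pychecker
complaint. self.matches is presumably first assigned inside the complete
callback, and pychecker warns when an instance attribute is created
outside __init__. A sketch of the pattern, assuming the classes follow
the usual readline completer protocol:

    class Completer:
        def __init__(self):
            self.words = ["main", "contrib", "non-free"]
            # Creating the attribute here avoids pychecker's warning
            # about attributes defined outside __init__
            self.matches = []

        def complete(self, text, state):
            if state == 0:
                # First call for this prefix: rebuild the match list
                self.matches = [w for w in self.words if w.startswith(text)]
            if state < len(self.matches):
                return self.matches[state]
            return None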
diff --git a/dak/update_db.py b/dak/update_db.py
index 6db74d9..e9f8441 100755
--- a/dak/update_db.py
+++ b/dak/update_db.py
@@ -29,6 +29,7 @@
 import psycopg2, sys, fcntl, os
 import apt_pkg
 import time
+import errno
 from daklib import database
 from daklib import utils
 
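The update_db.py hunk only adds the missing import; the code that
actually uses errno is outside this hunk. For illustration, the kind of
lock handling that needs it (hypothetical, not the actual dak code):

    import errno, fcntl, os, sys

    def obtain_lock(path):
        lock_fd = os.open(path, os.O_RDWR | os.O_CREAT)
        try:
            fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except IOError, e:
            # Without "import errno", pychecker reports errno as undefined
            if e.errno in (errno.EACCES, errno.EAGAIN):
                sys.stderr.write("another process holds the lock\n")
                sys.exit(1)
            raise
        return lock_fd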
-- 
1.5.6.5

