
[dak/security] revert change to get_files_id



Signed-off-by: Mark Hymers <mhy@debian.org>
---
 ChangeLog                |    4 ++++
 dak/process_accepted.py  |    2 +-
 dak/process_unchecked.py |    4 ++--
 daklib/database.py       |   10 ++++------
 4 files changed, 11 insertions(+), 9 deletions(-)

diff --git a/ChangeLog b/ChangeLog
index 770fe5f..3e3f33e 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,5 +1,9 @@
 2008-08-15  Mark Hymers  <mhy@debian.org>
 
+	*  dak/process_accepted.py, dak/process_unchecked.py,
+	daklib/database.py: Don't change get_files_id to use sha1sum and
+	sha256sum.
+
 	* setup/init_pool.sql, dak/check_archive.py, dak/decode_dot_dak.py,
 	dak/process_accepted.py, dak/process_unchecked.py, daklib/database.py,
 	daklib/queue.py, daklib/utils.py: Attempt to add sha1sum and
diff --git a/dak/process_accepted.py b/dak/process_accepted.py
index b28d9f9..a26ce57 100755
--- a/dak/process_accepted.py
+++ b/dak/process_accepted.py
@@ -311,7 +311,7 @@ def install ():
                 # files id is stored in dsc_files by check_dsc().
                 files_id = dsc_files[dsc_file].get("files id", None)
                 if files_id == None:
-                    files_id = database.get_files_id(filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], files[file]["sha1sum"], files[file]["sha256sum"], dsc_location_id)
+                    files_id = database.get_files_id(filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], dsc_location_id)
                 # FIXME: needs to check for -1/-2 and or handle exception
                 if files_id == None:
                     files_id = database.set_files_id (filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], files[file]["sha1sum"], files[file]["sha256sum"], dsc_location_id)
diff --git a/dak/process_unchecked.py b/dak/process_unchecked.py
index 3354a57..9318770 100755
--- a/dak/process_unchecked.py
+++ b/dak/process_unchecked.py
@@ -630,11 +630,11 @@ def check_files():
 
             # Check the md5sum & size against existing files (if any)
             files[f]["pool name"] = utils.poolify (changes["source"], files[f]["component"])
-            files_id = database.get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["sha1sum"], files[f]["sha256sum"], files[f]["location id"])
+            files_id = database.get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["location id"])
             if files_id == -1:
                 reject("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
             elif files_id == -2:
-                reject("md5sum, sha1sum, sha256sum and/or size mismatch on existing copy of %s." % (f))
+                reject("md5sum and/or size mismatch on existing copy of %s." % (f))
             files[f]["files id"] = files_id
 
             # Check for packages that have moved from one component to another
diff --git a/daklib/database.py b/daklib/database.py
index b2b55a7..e11d3cd 100755
--- a/daklib/database.py
+++ b/daklib/database.py
@@ -317,7 +317,7 @@ def get_or_set_fingerprint_id (fingerprint):
 
 ################################################################################
 
-def get_files_id (filename, size, md5sum, sha1sum, sha256sum, location_id):
+def get_files_id (filename, size, md5sum, location_id):
     global files_id_cache
 
     cache_key = "%s_%d" % (filename, location_id)
@@ -326,7 +326,7 @@ def get_files_id (filename, size, md5sum, sha1sum, sha256sum, location_id):
         return files_id_cache[cache_key]
 
     size = int(size)
-    q = projectB.query("SELECT id, size, md5sum, sha1sum, sha256sum FROM files WHERE filename = '%s' AND location = %d" % (filename, location_id))
+    q = projectB.query("SELECT id, size, md5sum FROM files WHERE filename = '%s' AND location = %d" % (filename, location_id))
     ql = q.getresult()
     if ql:
         if len(ql) != 1:
@@ -334,9 +334,7 @@ def get_files_id (filename, size, md5sum, sha1sum, sha256sum, location_id):
         ql = ql[0]
         orig_size = int(ql[1])
         orig_md5sum = ql[2]
-        orig_sha1sum = ql[3]
-        orig_sha256sum = ql[4]
-        if orig_size != size or orig_md5sum != md5sum or orig_sha1sum != sha1sum or orig_sha256sum != sha256sum:
+        if orig_size != size or orig_md5sum != md5sum:
             return -2
         files_id_cache[cache_key] = ql[0]
         return files_id_cache[cache_key]
@@ -367,7 +365,7 @@ def set_files_id (filename, size, md5sum, sha1sum, sha256sum, location_id):
 
     projectB.query("INSERT INTO files (filename, size, md5sum, sha1sum, sha256sum, location) VALUES ('%s', %d, '%s', '%s', '%s', %d)" % (filename, long(size), md5sum, sha1sum, sha256sum, location_id))
 
-    return get_files_id (filename, size, md5sum, sha1sum, sha256sum, location_id)
+    return get_files_id (filename, size, md5sum, location_id)
 
     ### currval has issues with postgresql 7.1.3 when the table is big
     ### it was taking ~3 seconds to return on auric which is very Not
-- 
1.5.6.5
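
For reference, a minimal standalone sketch of the lookup contract this revert restores: get_files_id() once again compares only size and md5sum against the existing pool entry, returning -1 on multiple matches, -2 on a mismatch, and None when the file is not yet known (the caller then falls through to set_files_id()). The rows argument and the lookup_files_id name below are illustrative only and are not part of daklib.

    # Illustration of the restored get_files_id() behaviour; not daklib code.
    # `rows` stands in for the result of the "SELECT id, size, md5sum" query
    # for a given filename and location.
    def lookup_files_id(rows, size, md5sum):
        if not rows:
            return None      # unknown file; caller inserts it via set_files_id()
        if len(rows) != 1:
            return -1        # multiple pool entries for one filename: internal error
        file_id, orig_size, orig_md5sum = rows[0]
        if int(orig_size) != int(size) or orig_md5sum != md5sum:
            return -2        # existing copy differs: upload is rejected
        return file_id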


