Merge bug 414560 to hg repositories (force file support for patcher fastmode), originally p=rhelmer, r=bsmedberg,bhearsum NPOTB UPDATE_PACKAGING_R7
author: Nick Thomas <nrthomas@gmail.com>
Mon, 02 Feb 2009 18:14:02 +1300
changeset 24517 fb32f6e1859c07846a01b4478a7b1678019e0b45
parent 24516 02fe3defdfc5cba9221acb7f9216fe9b70ed0247
child 24518 b7d7aa6ac670efc3092bc01ed1581c66cb406c55
push id: unknown
push user: unknown
push date: unknown
reviewers: bsmedberg, bhearsum
bugs: 414560
milestone: 1.9.2a1pre
Merge bug 414560 to hg repositories (force file support for patcher fastmode), originally p=rhelmer, r=bsmedberg,bhearsum NPOTB
tools/update-packaging/make_incremental_updates.py
--- a/tools/update-packaging/make_incremental_updates.py
+++ b/tools/update-packaging/make_incremental_updates.py
@@ -251,34 +251,41 @@ def process_explicit_remove_files(dir_pa
 
         for line in list_file:
             line = line.strip()
             # Exclude any blank lines or any lines ending with a slash, which indicate
             # directories.  The updater doesn't know how to remove entire directories.
             if line and not line.endswith("/"): 
                 patch_info.append_remove_instruction(os.path.join(prefix,line))
 
-def create_partial_patch(from_dir_path, to_dir_path, patch_filename, shas, patch_info):
+def create_partial_patch(from_dir_path, to_dir_path, patch_filename, shas, patch_info, forced_updates):
     """ Builds a partial patch by comparing the files in from_dir_path to thoes of to_dir_path"""
     # Cannocolize the paths for safey
     from_dir_path = os.path.abspath(from_dir_path)
     to_dir_path = os.path.abspath(to_dir_path)
     # First create a hashtable of the from  and to directories
     from_dir_hash,from_dir_set = patch_info.build_marfile_entry_hash(from_dir_path)
     to_dir_hash,to_dir_set = patch_info.build_marfile_entry_hash(to_dir_path)
+    # Create a list of the forced updates 
+    forced_list = forced_updates.strip().split('|')
     
     # Files which exist in both sets need to be patched
     patch_filenames = list(from_dir_set.intersection(to_dir_set))
     patch_filenames.sort()
     for filename in patch_filenames:
         from_marfile_entry = from_dir_hash[filename]
         to_marfile_entry = to_dir_hash[filename]
-        if from_marfile_entry.sha() != to_marfile_entry.sha():
-            # Not the same - calculate a patch
-            create_partial_patch_for_file(from_marfile_entry, to_marfile_entry, shas, patch_info)
+        if filename in forced_list:
+            print "Forcing "+ filename
+            # This filename is in the forced list, explicitly add
+       	    create_add_patch_for_file(to_dir_hash[filename], patch_info)
+        else: 
+          if from_marfile_entry.sha() != to_marfile_entry.sha():
+              # Not the same - calculate a patch
+              create_partial_patch_for_file(from_marfile_entry, to_marfile_entry, shas, patch_info)
 
     # files in from_dir not in to_dir need to be removed
     remove_filenames = list(from_dir_set - to_dir_set)
     remove_filenames.sort()
     for filename in remove_filenames:
         patch_info.append_remove_instruction(from_dir_hash[filename].name)
 
     # files in to_dir not in from_dir need to added
@@ -382,17 +389,17 @@ def create_partial_patches(patches):
             extract_mar(to_filename, work_dir_to)
             to_decoded = decode_filename(from_filename)
             to_buildid = get_buildid(work_dir_to, to_decoded['platform'])
             to_shasum = sha.sha(open(to_filename).read()).hexdigest()
             to_size = str(os.path.getsize(to_filename))
 
             mar_extract_time = time.time()
 
-            partial_filename = create_partial_patch(work_dir_from, work_dir_to, patch_filename, shas, PatchInfo(work_dir, ['channel-prefs.js','update.manifest','removed-files'],['/readme.txt']))
+            partial_filename = create_partial_patch(work_dir_from, work_dir_to, patch_filename, shas, PatchInfo(work_dir, ['channel-prefs.js','update.manifest','removed-files'],['/readme.txt']),forced_updates)
             partial_buildid = to_buildid
             partial_shasum = sha.sha(open(partial_filename).read()).hexdigest()
             partial_size = str(os.path.getsize(partial_filename))
 
             metadata.append({
              'to_filename': os.path.basename(to_filename),
              'from_filename': os.path.basename(from_filename),
              'partial_filename': os.path.basename(partial_filename),