author    Mateusz Pilat <m.pilat@partner.samsung.com>    2020-02-14 18:37:35 +0100
committer Mateusz Pilat <m.pilat@partner.samsung.com>    2020-02-14 20:30:42 +0100
commit    f1d9816984b695fc4ae6a011c23c5f274385070e (patch)
tree      7b9766f680500eac09d676334fb6963cfb60096c
parent    d8b17a1fe40bdc1fb1df0a70be6e92cdbc5ce09c (diff)
Fix packaging script - memory error in MD5 calculations
During package preparation each tar file is read whole and stashed into RAM; when the file is large, as an ONAP package is, this results in a memory error. Stream the file in fixed-size chunks instead so memory use stays bounded.

Signed-off-by: Mateusz Pilat <m.pilat@partner.samsung.com>
Issue-ID: OOM-2308
Change-Id: Ic2dbea4018162bd4cfb380f6fb34078062c38a09
-rwxr-xr-x  build/package.py  8
1 file changed, 5 insertions(+), 3 deletions(-)
diff --git a/build/package.py b/build/package.py
index 82fbba34..9e37d4bf 100755
--- a/build/package.py
+++ b/build/package.py
@@ -107,11 +107,13 @@ def add_checksum_info(output_dir):
     tar_files = ['resources_package.tar', 'aux_package.tar', 'sw_package.tar']
     for tar_file in tar_files:
         try:
-            data = os.path.join(output_dir, tar_file)
-            cksum = hashlib.md5(open(data, 'rb').read()).hexdigest()
+            checksum = hashlib.md5()
+            with open(os.path.join(output_dir, tar_file), 'rb') as f:
+                for chunk in iter(lambda: f.read(4096), b""):
+                    checksum.update(chunk)
             with open(os.path.join(output_dir, 'package.info'), 'r') as f:
                 json_data = json.load(f)
-            json_data['Build_info']['Packages'].update({tar_file: cksum})
+            json_data['Build_info']['Packages'].update({tar_file: checksum.hexdigest()})
             with open(os.path.join(output_dir, 'package.info'), 'w') as f:
                 json.dump(json_data, f, indent=4)
         except FileNotFoundError:
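
For reference, a minimal standalone sketch of the streaming pattern this patch adopts; the md5_of_file helper and its example path are illustrative assumptions, not part of build/package.py, though the 4096-byte chunk size mirrors the diff:

import hashlib

def md5_of_file(path, chunk_size=4096):
    """Return the MD5 hex digest of a file read in fixed-size chunks.

    Peak memory stays at roughly chunk_size bytes, so arbitrarily large
    package tarballs can be hashed without exhausting RAM.
    """
    checksum = hashlib.md5()
    with open(path, 'rb') as f:
        # iter() with a b"" sentinel keeps calling f.read(chunk_size)
        # until the file is exhausted.
        for chunk in iter(lambda: f.read(chunk_size), b""):
            checksum.update(chunk)
    return checksum.hexdigest()

# Example usage (hypothetical path):
# print(md5_of_file('output/sw_package.tar'))

Unlike the replaced hashlib.md5(open(data, 'rb').read()) call, which buffers the entire tarball and also leaks the file handle, this keeps memory constant regardless of file size and closes the file deterministically via the with block.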