Fix larger-than-memory error

This commit is contained in:
Aitor Morales-Gregorio 2025-09-26 08:58:03 +02:00
parent 895ba73144
commit eae8e29f47
3 changed files with 17 additions and 13 deletions

View file

@@ -6,9 +6,12 @@ import time
def process_image(input_tuple):
fname, A = input_tuple
if len(A.shape) > 2:
A = A.mean(axis=-1) # Take average color
A = A[::5, ::5] # Downsample
n_threads = os.getenv('OMP_NUM_THREADS', '(unset)')
print(f"Worker {fname=} OMP_NUM_THREADS={n_threads}", flush=True)
# Decompose image
U, S, Vh = np.linalg.svd(A)
@@ -57,10 +60,10 @@ if __name__ == '__main__':
new_images = p.map(process_image, image_arrays)
elapsed_time = time.time() - start_time
# I/O save the processed images
for im, fname in zip(new_images, fnames):
im = Image.fromarray(im)
im.save(fname.replace('images', 'processed_images'))
# # I/O save the processed images
# for im, fname in zip(new_images, fnames):
# im = Image.fromarray(im)
# im.save(fname.replace('images', 'processed_images'))
print(f'{n_processes} processes and {n_threads} threads and {len(fnames)} jobs: {elapsed_time}\n',
flush=True)