From baa24370f6cdca29b168586f0581a15bcfe6ec53 Mon Sep 17 00:00:00 2001
From: Arjen Hiemstra
Date: Thu, 22 Sep 2016 00:57:14 +0200
Subject: [PATCH] Force garbage collection during ProcessSlicedLayersJob

For some reason, Python likes to hold on to LayerData and friends. Forcing
a GC run here will clean them up properly.

Contributes to CURA-2406
---
 plugins/CuraEngineBackend/ProcessSlicedLayersJob.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/plugins/CuraEngineBackend/ProcessSlicedLayersJob.py b/plugins/CuraEngineBackend/ProcessSlicedLayersJob.py
index 7443340c5b..c4e9554b2c 100644
--- a/plugins/CuraEngineBackend/ProcessSlicedLayersJob.py
+++ b/plugins/CuraEngineBackend/ProcessSlicedLayersJob.py
@@ -1,6 +1,8 @@
 # Copyright (c) 2016 Ultimaker B.V.
 # Cura is released under the terms of the AGPLv3 or higher.
 
+import gc
+
 from UM.Job import Job
 from UM.Scene.Iterator.DepthFirstIterator import DepthFirstIterator
 from UM.Scene.SceneNode import SceneNode
@@ -64,6 +66,12 @@ class ProcessSlicedLayersJob(Job):
             self._progress.hide()
             return
 
+        # Force garbage collection.
+        # For some reason, Python has a tendency to keep the layer data
+        # in memory longer than needed. Forcing the GC to run here makes
+        # sure any old layer data is really cleaned up before adding new.
+        gc.collect()
+
         mesh = MeshData()
         layer_data = LayerDataBuilder.LayerDataBuilder()
         layer_count = len(self._layers)
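
A minimal standalone sketch (not part of the patch) of the pattern the commit applies: when large
objects are kept alive by reference cycles, dropping the last visible reference is not enough and an
explicit gc.collect() reclaims them immediately. The LayerData class and names below are hypothetical
stand-ins for illustration, not Cura's real classes.

import gc
import weakref


class LayerData:
    """Hypothetical stand-in for Cura's layer data; just holds a big list."""
    def __init__(self):
        self.polygons = list(range(100000))
        self.owner = None


def demo():
    layer = LayerData()
    layer.owner = layer            # reference cycle: refcounting alone cannot free it
    probe = weakref.ref(layer)     # lets us observe when the object is really gone

    del layer
    print("alive after del:", probe() is not None)            # True: the cycle keeps it alive

    gc.collect()                   # collect the cycle now instead of "eventually"
    print("alive after gc.collect():", probe() is not None)   # False: memory released


if __name__ == "__main__":
    demo()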