Commit 8e70ac8e authored by sumpfralle

disable ODE in multiprocessing mode ("get_shape" results are not picklable)


git-svn-id: https://pycam.svn.sourceforge.net/svnroot/pycam/trunk@831 bbaffbd6-741e-11dd-a85d-61de82d9cad9
parent 726eacd5
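The rationale behind the commit message: Python's multiprocessing module ships task arguments to worker processes via pickle, and ODE geometry objects (such as the results of "get_shape") wrap C-level state that pickle cannot serialize, so they never reach a worker. A minimal illustration of that failure mode, using a thread lock as a stand-in for such an object (this is not PyCAM or ODE code):

# Sketch only: a thread lock stands in for any object wrapping C-level state,
# e.g. an ODE shape; pickle refuses to serialize it, which is exactly what
# breaks multiprocessing-based task distribution.
import pickle
import threading

unpicklable = threading.Lock()
try:
    pickle.dumps(unpicklable)
except (TypeError, pickle.PicklingError) as exc:
    print("pickling failed: %s" % exc)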
@@ -609,7 +609,8 @@ class ProjectGui:
             obj.connect("color-set", self.update_view)
         # set the availability of ODE
         enable_ode_control = self.gui.get_object("SettingEnableODE")
-        if pycam.Physics.ode_physics.is_ode_available():
+        if not pycam.Utils.threading.is_multiprocessing_enabled() \
+                and pycam.Physics.ode_physics.is_ode_available():
             self.settings.add_item("enable_ode", enable_ode_control.get_active, enable_ode_control.set_active)
         else:
             enable_ode_control.set_sensitive(False)
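The GUI hunk above combines two checks before offering the ODE setting. A GUI-free sketch of that gate, with the predicate helpers introduced later in this commit passed in as plain callables (hypothetical wrapper, for illustration only):

def ode_option_allowed(is_multiprocessing_enabled, is_ode_available):
    # offer ODE only when it is importable *and* no worker processes are used
    return (not is_multiprocessing_enabled()) and is_ode_available()

# ODE importable, but multiprocessing active -> the checkbox stays insensitive
print(ode_option_allowed(lambda: True, lambda: True))   # False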
@@ -1591,7 +1592,11 @@ class ProjectGui:
         self.log_model.append((timestamp, title, message))
         # update the status bar (if the GTK interface is still active)
         if not self.status_bar.window is None:
-            self.status_bar.push(0, message)
+            try:
+                self.status_bar.push(0, message)
+            except TypeError:
+                new_message = re.sub("[^\w\s]", "", message)
+                self.status_bar.push(0, new_message)
 
     @gui_activity_guard
     def copy_log_to_clipboard(self, widget=None):
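The new fallback in the log handler strips every character that is neither a word character nor whitespace before retrying status_bar.push(). The substitution itself behaves like this (a standalone example, not PyCAM code):

import re

message = 'Loading model "test.stl" failed: 50% done?!'
print(re.sub(r"[^\w\s]", "", message))
# -> Loading model teststl failed 50 done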
@@ -109,10 +109,7 @@ class DropCutter:
             xy_coords = [(pos.x, pos.y) for pos in one_grid_line]
             args.append((xy_coords, minz, maxz, self.model, self.cutter,
                     self.physics))
-        # ODE does not work with multi-threading (TODO: check this)
-        disable_multiprocessing = not self.physics is None
-        for points in run_in_parallel(_process_one_grid_line,
-                args, disable_multiprocessing=disable_multiprocessing):
+        for points in run_in_parallel(_process_one_grid_line, args):
             self.pa.new_scanline()
             if draw_callback and draw_callback(text="DropCutter: processing " \
                     + "line %d/%d" % (current_line + 1, num_of_lines)):
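Both DropCutter and PushCutter now call run_in_parallel() unconditionally; the GUI-level gate above is what keeps ODE-backed jobs out of multiprocessing. A simplified stand-in for such a dispatcher (not PyCAM's actual run_in_parallel), assuming it falls back to serial execution when no pool size is given:

from multiprocessing import Pool

def run_in_parallel_sketch(func, args_list, pool_size=None):
    # Simplified stand-in: run serially when multiprocessing is disabled,
    # otherwise hand the argument tuples to a process pool. Each entry in
    # args_list is a single tuple, matching the style of _process_one_line
    # and _process_one_grid_line above.
    if pool_size is None:
        for args in args_list:
            yield func(args)
    else:
        pool = Pool(pool_size)
        try:
            for result in pool.imap(func, args_list):
                yield result
        finally:
            pool.close()
            pool.join()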
@@ -43,6 +43,10 @@ def _process_one_line((p1, p2, depth, models, cutter, physics)):
 class PushCutter:
 
     def __init__(self, cutter, models, path_processor, physics=None):
+        if physics is None:
+            log.debug("Starting PushCutter (without ODE)")
+        else:
+            log.debug("Starting PushCutter (with ODE)")
         self.cutter = cutter
         self.models = models
         self.pa = path_processor
@@ -130,10 +134,7 @@ class PushCutter:
             p1, p2 = line
             args.append((p1, p2, depth, models, self.cutter, self.physics))
-        # ODE does not work with multi-threading
-        disable_multiprocessing = not self.physics is None
-        for points in run_in_parallel(_process_one_line, args,
-                disable_multiprocessing=disable_multiprocessing):
+        for points in run_in_parallel(_process_one_line, args):
             if points:
                 self.pa.new_scanline()
                 for p in points:
@@ -123,6 +123,7 @@ def generate_toolpath(model, tool_settings=None,
     @return: the resulting toolpath object or an error string in case of invalid
         arguments
     """
+    log.debug("Starting toolpath generation")
     overlap = number(overlap)
     step_down = number(step_down)
     engrave_offset = number(engrave_offset)
@@ -54,7 +54,7 @@ log = pycam.Utils.log.get_logger()
 # possible values:
 #   None: not initialized
 #   False: no threading
-#   multiprocessing: the multiprocessing module is impored and enabled
+#   multiprocessing: the multiprocessing module is imported and enabled later
 __multiprocessing = None
 
 # needs to be initialized, if multiprocessing is enabled
@@ -76,6 +76,13 @@ def run_in_parallel(*args, **kwargs):
 def is_pool_available():
     return not __manager is None
 
+def is_multiprocessing_enabled():
+    return not __multiprocessing is None
+
+def is_server_mode_available():
+    # server mode is disabled for the Windows standalone executable
+    return not (hasattr(sys, "frozen") and sys.frozen)
+
 def get_pool_statistics():
     global __manager
     if __manager is None:
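is_server_mode_available() relies on sys.frozen, an attribute that bundlers such as PyInstaller and py2exe set on the interpreter and that a regular Python installation does not define. The check can be exercised on its own (a standalone copy of the helper, for illustration):

import sys

def is_server_mode_available():
    # frozen (standalone) builds cannot use server mode due to pickle errors
    return not (hasattr(sys, "frozen") and sys.frozen)

print(is_server_mode_available())   # True when run from a normal interpreter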
@@ -83,12 +90,13 @@ def get_pool_statistics():
     else:
         return __manager.statistics().get_worker_statistics()
 
-def init_threading(number_of_processes=None, enable_server=False, remote=None, run_server=False,
-        server_credentials=""):
+def init_threading(number_of_processes=None, enable_server=False, remote=None,
+        run_server=False, server_credentials=""):
     global __multiprocessing, __num_of_processes, __manager, __closing, __task_source_uuid
-    # server mode is disabled for the Windows standalone executable
-    is_frozen = hasattr(sys, "frozen") and sys.frozen
-    if is_frozen and (enable_server or run_server):
+    if __multiprocessing:
+        # kill the manager and clean everything up for a re-initialization
+        cleanup()
+    if (not is_server_mode_available()) and (enable_server or run_server):
         # server mode is disabled for the Windows pyinstaller standalone
         # due to "pickle errors". How to reproduce: run the standalone binary
         # with "--enable-server --server-auth-key foo".
@@ -113,7 +121,7 @@ def init_threading(number_of_processes=None, enable_server=False, remote=None,
         remote = None
         run_server = None
         server_credentials = ""
-    if is_frozen:
+    if not is_server_mode_available():
         # Running multiple processes with the Windows standalone executable
         # causes "WindowsError: invalid handle" error messages. The processes
         # can't communicate - thus no results are returned.
@@ -246,6 +254,8 @@ def cleanup():
         # check if it is still alive and kill it if necessary
         if __manager._process.is_alive():
             __manager._process.terminate()
+    __manager = None
+    __closing = None
 
 def _spawn_daemon(manager, number_of_processes, worker_uuid_list):
     """ wait for items in the 'tasks' queue to appear and then spawn workers
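Resetting __manager and __closing to None at the end of cleanup() is what makes the new guard at the top of init_threading() safe: a second initialization tears the old manager down instead of leaking it. The pattern, reduced to a self-contained sketch (hypothetical names, not PyCAM's module):

_manager = None

def init(config):
    # re-initialization guard: tear down any previous manager first
    global _manager
    if _manager is not None:
        cleanup()
    _manager = {"config": config}   # stand-in for the real process manager

def cleanup():
    # reset module-level state so a later init() starts from a clean slate
    global _manager
    if _manager is not None:
        _manager = None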