Commit f84adff7 authored by sumpfralle

first batch of style fixes according to pylint (including minor typos)


git-svn-id: https://pycam.svn.sourceforge.net/svnroot/pycam/trunk@974 bbaffbd6-741e-11dd-a85d-61de82d9cad9
parent 7edff56e
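Several of the hunks below (in the cleanup() and _spawn_daemon() sections) replace `except IOError, EOFError:` with `except (IOError, EOFError):`. This is more than a style fix: in Python 2 the comma form catches only the first exception type and rebinds the second name to the caught instance. A minimal standalone sketch of the difference, not taken from the PyCAM sources:

# Old form: despite appearances, this catches only IOError; the caught
# exception object is bound to the name EOFError, shadowing the built-in.
try:
    raise IOError("broken pipe")
except IOError, EOFError:
    print "caught:", EOFError

# Corrected form used in this commit: a tuple of exception types catches both.
try:
    raise EOFError("connection closed")
except (IOError, EOFError), error:
    print "caught:", error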
@@ -55,7 +55,7 @@ import time
 try:
     import multiprocessing
 except ImportError:
-    class multiprocessing:
+    class multiprocessing(object):
         # use an arbitrary other Exception
         AuthenticationError = socket.error
@@ -89,7 +89,8 @@ def show_gui(inputfile=None, task_settings_file=None):
         full_report.append("Details:")
         full_report.append(report_gtk)
         full_report.append("")
-        full_report.append("Detailed list of requirements: %s" % GuiCommon.REQUIREMENTS_LINK)
+        full_report.append("Detailed list of requirements: %s" % \
+                GuiCommon.REQUIREMENTS_LINK)
         log.critical(os.linesep.join(full_report))
         return EXIT_CODES["requirements"]
@@ -134,7 +135,7 @@ def load_model_file(filename, program_locations, unit=None):
     if not os.path.isfile(filename):
         log.warn("The input file ('%s') was not found!" % filename)
         return None
-    filetype, importer = pycam.Importers.detect_file_type(filename)
+    importer = pycam.Importers.detect_file_type(filename)[1]
     model = importer(filename, program_locations=program_locations, unit=unit)
     if model is None:
         log.warn("Failed to load the model file (%s)." % filename)
@@ -180,13 +181,14 @@ def execute(parser, opts, args, pycam):
             # print only the bare version number
             print VERSION
         else:
-            print "PyCAM %s" % VERSION
-            print "Copyright (C) 2008-2010 Lode Leroy"
-            print "Copyright (C) 2010 Lars Kruse"
-            print
-            print "License GPLv3+: GNU GPL version 3 or later <http://gnu.org/licenses/gpl.html>."
-            print "This is free software: you are free to change and redistribute it."
-            print "There is NO WARRANTY, to the extent permitted by law."
+            text = '''PyCAM %s
+Copyright (C) 2008-2010 Lode Leroy
+Copyright (C) 2010-2011 Lars Kruse
+
+License GPLv3+: GNU GPL version 3 or later <http://gnu.org/licenses/gpl.html>.
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.''' % VERSION
+            print text
         return EXIT_CODES["ok"]
     if not opts.disable_psyco:
@@ -271,7 +273,7 @@ def execute(parser, opts, args, pycam):
     elif opts.support_type == "none":
         pass
     else:
-        raise NotImplemented, "Invalid support type specified: %s" % \
+        raise NotImplementedError, "Invalid support type specified: %s" % \
                 opts.support_type
     if opts.collision_engine == "ode":
         tps.set_calculation_backend("ODE")
@@ -413,7 +415,7 @@ def execute(parser, opts, args, pycam):
         handler, closer = get_output_handler(opts.export_task_config)
         if handler is None:
             return EXIT_CODES["write_output_failed"]
-        print >>handler, tps.get_string()
+        print >> handler, tps.get_string()
         closer()
     # no error -> don't return a specific exit code
     return None
@@ -434,8 +436,8 @@ if __name__ == "__main__":
             + "in batch mode. Most parameters are useful only for " \
             + "batch mode.",
             epilog="Take a look at the wiki for more information: " \
-            + "http://sourceforge.net/apps/mediawiki/pycam/.\n" \
-            + "Bug reports: http://sourceforge.net/tracker/?group_id=237831&atid=1104176")
+            + "http://sourceforge.net/apps/mediawiki/pycam/.\nBug reports" \
+            + ": http://sf.net/tracker/?group_id=237831&atid=1104176")
     group_general = parser.add_option_group("General options")
     group_export = parser.add_option_group("Export formats",
             "Export the resulting toolpath or meta-data in various formats. " \
@@ -512,8 +514,8 @@ if __name__ == "__main__":
             + "connecting to a remote server or for granting access " \
            + "to remote clients.")
     group_general.add_option("-q", "--quiet", dest="quiet",
-            default=False, action="store_true", help="output only warnings and " \
-            + "errors.")
+            default=False, action="store_true", help="output only warnings " \
+            + "and errors.")
     group_general.add_option("-d", "--debug", dest="debug",
             default=False, action="store_true", help="enable output of debug " \
             + "messages.")
...
@@ -27,56 +27,56 @@ from pycam.Toolpath import simplify_toolpath
 class ContourCutter(pycam.PathProcessors.BasePathProcessor):
     def __init__(self, reverse=False):
-        self.paths = []
+        super(ContourCutter, self).__init__()
         self.curr_path = None
         self.scanline = None
-        self.pe = None
+        self.polygon_extractor = None
         self.points = []
         self.reverse = reverse
         self.__forward = Point(1, 1, 0)

-    def append(self, p):
+    def append(self, point):
         # Sort the points in positive x/y direction - otherwise the
         # PolygonExtractor breaks.
-        if self.points and (p.sub(self.points[0]).dot(self.__forward) < 0):
-            self.points.insert(0, p)
+        if self.points and (point.sub(self.points[0]).dot(self.__forward) < 0):
+            self.points.insert(0, point)
         else:
-            self.points.append(p)
+            self.points.append(point)

     def new_direction(self, direction):
-        if self.pe == None:
-            self.pe = PolygonExtractor(PolygonExtractor.CONTOUR)
-        self.pe.new_direction(direction)
+        if self.polygon_extractor == None:
+            self.polygon_extractor = PolygonExtractor(PolygonExtractor.CONTOUR)
+        self.polygon_extractor.new_direction(direction)

     def end_direction(self):
-        self.pe.end_direction()
+        self.polygon_extractor.end_direction()

     def new_scanline(self):
-        self.pe.new_scanline()
+        self.polygon_extractor.new_scanline()
         self.points = []

     def end_scanline(self):
-        for i in range(1, len(self.points)-1):
-            self.pe.append(self.points[i])
-        self.pe.end_scanline()
+        for i in range(1, len(self.points) - 1):
+            self.polygon_extractor.append(self.points[i])
+        self.polygon_extractor.end_scanline()

     def finish(self):
-        self.pe.finish()
-        if self.pe.merge_path_list:
-            paths = self.pe.merge_path_list
-        elif self.pe.hor_path_list:
-            paths = self.pe.hor_path_list
+        self.polygon_extractor.finish()
+        if self.polygon_extractor.merge_path_list:
+            paths = self.polygon_extractor.merge_path_list
+        elif self.polygon_extractor.hor_path_list:
+            paths = self.polygon_extractor.hor_path_list
         else:
-            paths = self.pe.ver_path_list
+            paths = self.polygon_extractor.ver_path_list
         if paths:
-            for p in paths:
-                p.append(p.points[0])
-                simplify_toolpath(p)
+            for path in paths:
+                path.append(path.points[0])
+                simplify_toolpath(path)
         if paths:
             if self.reverse:
                 paths.reverse()
             self.paths.extend(paths)
             self.sort_layered()
-        self.pe = None
+        self.polygon_extractor = None
@@ -28,19 +28,19 @@ from pycam.Geometry.Path import Path
 class PathAccumulator(pycam.PathProcessors.BasePathProcessor):
     def __init__(self, zigzag=False, reverse=False):
-        self.paths = []
+        super(PathAccumulator, self).__init__()
         self.curr_path = None
         self.zigzag = zigzag
         self.scanline = None
         self.reverse = reverse

-    def append(self, p):
+    def append(self, point):
         if self.curr_path == None:
             self.curr_path = Path()
         if self.reverse:
-            self.curr_path.insert(0, p)
+            self.curr_path.insert(0, point)
         else:
-            self.curr_path.append(p)
+            self.curr_path.append(point)

     def new_direction(self, direction):
         self.scanline = 0
...
@@ -29,51 +29,51 @@ from pycam.Toolpath import simplify_toolpath
 class PolygonCutter(pycam.PathProcessors.BasePathProcessor):
     def __init__(self, reverse=False):
-        self.paths = []
+        super(PolygonCutter, self).__init__()
         self.curr_path = None
         self.scanline = None
-        self.pe = PolygonExtractor(PolygonExtractor.MONOTONE)
+        self.poly_extractor = PolygonExtractor(PolygonExtractor.MONOTONE)
         self.reverse = reverse

-    def append(self, p):
-        self.pe.append(p)
+    def append(self, point):
+        self.poly_extractor.append(point)

     def new_direction(self, direction):
-        self.pe.new_direction(direction)
+        self.poly_extractor.new_direction(direction)

     def end_direction(self):
-        self.pe.end_direction()
+        self.poly_extractor.end_direction()

     def new_scanline(self):
-        self.pe.new_scanline()
+        self.poly_extractor.new_scanline()

     def end_scanline(self):
-        self.pe.end_scanline()
+        self.poly_extractor.end_scanline()

     def finish(self):
-        self.pe.finish()
+        self.poly_extractor.finish()
         paths = []
         source_paths = []
-        if self.pe.hor_path_list:
-            source_paths.extend(self.pe.hor_path_list)
-        if self.pe.ver_path_list:
-            source_paths.extend(self.pe.ver_path_list)
+        if self.poly_extractor.hor_path_list:
+            source_paths.extend(self.poly_extractor.hor_path_list)
+        if self.poly_extractor.ver_path_list:
+            source_paths.extend(self.poly_extractor.ver_path_list)
         for path in source_paths:
             points = path.points
             for i in range(0, (len(points)+1)/2):
-                p = Path()
+                new_path = Path()
                 if i % 2 == 0:
-                    p.append(points[i])
-                    p.append(points[-i-1])
+                    new_path.append(points[i])
+                    new_path.append(points[-i-1])
                 else:
-                    p.append(points[-i-1])
-                    p.append(points[i])
-                paths.append(p)
+                    new_path.append(points[-i-1])
+                    new_path.append(points[i])
+                paths.append(new_path)
         if paths:
-            for p in paths:
-                simplify_toolpath(p)
+            for path in paths:
+                simplify_toolpath(path)
                 if self.reverse:
-                    p.reverse()
+                    path.reverse()
             self.paths.extend(paths)
             self.sort_layered()
@@ -26,11 +26,11 @@ from pycam.Toolpath import simplify_toolpath
 class SimpleCutter(pycam.PathProcessors.BasePathProcessor):
     def __init__(self, reverse=False):
-        self.paths = []
+        super(SimpleCutter, self).__init__()
         self.curr_path = None
         self.reverse = reverse

-    def append(self, p):
+    def append(self, point):
         curr_path = None
         if self.curr_path == None:
             curr_path = Path()
@@ -38,7 +38,7 @@ class SimpleCutter(pycam.PathProcessors.BasePathProcessor):
         else:
             curr_path = self.curr_path
             self.curr_path = None
-        curr_path.append(p)
+        curr_path.append(point)
         if self.curr_path == None:
             simplify_toolpath(curr_path)
             if self.reverse:
...
@@ -26,13 +26,13 @@ from pycam.Toolpath import simplify_toolpath
 class ZigZagCutter(pycam.PathProcessors.BasePathProcessor):
     def __init__(self, reverse=False):
-        self.paths = []
+        super(ZigZagCutter, self).__init__()
         self.curr_path = None
         self.scanline = None
         self.curr_scanline = None
         self.reverse = reverse

-    def append(self, p):
+    def append(self, point):
         curr_path = None
         if self.curr_path == None:
             curr_path = Path()
@@ -41,7 +41,7 @@ class ZigZagCutter(pycam.PathProcessors.BasePathProcessor):
             curr_path = self.curr_path
             self.curr_path = None
-        curr_path.append(p)
+        curr_path.append(point)
         if self.curr_path == None:
             if (self.scanline % 2) == 0:
...
@@ -26,6 +26,9 @@ __all__ = ["PathAccumulator", "SimpleCutter", "ZigZagCutter", "PolygonCutter",
 class BasePathProcessor(object):
+    def __init__(self):
+        self.paths = []
+
     def new_direction(self, direction):
         pass
@@ -37,9 +40,11 @@ class BasePathProcessor(object):
     def sort_layered(self, upper_first=True):
         if upper_first:
-            compare_height = lambda path1, path2: path1.points[0].z < path2.points[0].z
+            compare_height = lambda path1, path2: \
+                    path1.points[0].z < path2.points[0].z
         else:
-            compare_height = lambda path1, path2: path1.points[0].z > path2.points[0].z
+            compare_height = lambda path1, path2: \
+                    path1.points[0].z > path2.points[0].z
         finished = False
         while not finished:
             index = 0
...
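The path-processor hunks above share a single pattern: the duplicated `self.paths = []` initialisation moves into the new `BasePathProcessor.__init__`, and every subclass now calls it through `super()`. A minimal sketch of that refactoring; the subclass name here is illustrative only, not a PyCAM class:

class BasePathProcessor(object):
    def __init__(self):
        # state shared by every concrete path processor
        self.paths = []

class DemoCutter(BasePathProcessor):
    def __init__(self, reverse=False):
        # the subclass no longer duplicates "self.paths = []"
        super(DemoCutter, self).__init__()
        self.reverse = reverse

cutter = DemoCutter(reverse=True)
print cutter.paths, cutter.reverse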
@@ -68,7 +68,8 @@ def convert_triangles_to_vertices_faces(triangles):
     id_index_map = {}
     for t in triangles:
         coords = []
-        # TODO: check if we need to change the order of points for non-AOI models as well
+        # TODO: check if we need to change the order of points for non-AOI
+        # models as well.
         for p in (t.p1, t.p3, t.p2):
             # add the point to the id/index mapping, if necessary
             if not id_index_map.has_key(p.id):
@@ -199,7 +200,7 @@ class PhysicalWorld(object):
         http://sourceforge.net/tracker/index.php?func=detail&aid=2973876&group_id=24884&atid=382799
         """
         minz, maxz = geom.getAABB()[-2:]
-        currx, curry, currz = geom.getPosition()
+        currx, curry = geom.getPosition()[0:2]
         ray = ode.GeomRay(self._space, maxz-minz)
         ray.set((currx, curry, maxz), (0.0, 0.0, -1.0))
         return [ray]
...
@@ -67,14 +67,14 @@ def get_all_ips():
             ips = socket.gethostbyname_ex(name)
             if len(ips) == 3:
                 return ips[2]
-        except socket.gaiaerror:
+        except socket.gaierror:
             return []
     result.extend(get_ips_of_name(socket.gethostname()))
     result.extend(get_ips_of_name("localhost"))
     filtered_result = []
-    for ip in result:
-        if not ip in filtered_result:
-            filtered_result.append(ip)
+    for one_ip in result:
+        if not one_ip in filtered_result:
+            filtered_result.append(one_ip)
     def sort_ip_by_relevance(ip1, ip2):
         if ip1.startswith("127."):
             return 1
@@ -96,7 +96,7 @@ def get_external_program_location(key):
     # check the windows path via win32api
     try:
         import win32api
-        handle, location = win32api.FindExecutable(key)
+        location = win32api.FindExecutable(key)[1]
         if location:
             return location
     except:
@@ -114,7 +114,8 @@ def get_external_program_location(key):
     # do a manual scan in the programs directory (only for windows)
     try:
         from win32com.shell import shellcon, shell
-        program_dir = shell.SHGetFolderPath(0, shellcon.CSIDL_PROGRAM_FILES, 0, 0)
+        program_dir = shell.SHGetFolderPath(0, shellcon.CSIDL_PROGRAM_FILES,
+                0, 0)
     except ImportError:
         # no other options for non-windows systems
         return None
...
@@ -29,26 +29,26 @@ class Iterator:
         if self.ind >= len(self.seq):
             return None
         else:
-            v = self.seq[self.ind]
+            item = self.seq[self.ind]
             self.ind += 1
-            return v
+            return item

-    def insertBefore(self, v):
-        self.seq.insert(self.ind - 1, v)
+    def insertBefore(self, item):
+        self.seq.insert(self.ind - 1, item)
         self.ind += 1

-    def insert(self, v):
-        self.seq.insert(self.ind, v)
+    def insert(self, item):
+        self.seq.insert(self.ind, item)
         self.ind += 1

-    def replace(self, v, w):
+    def replace(self, item_old, item_new):
         for i in range(len(self.seq)):
-            if self.seq[i] == v:
-                self.seq[i] = w
+            if self.seq[i] == item_old:
+                self.seq[i] = item_new

-    def remove(self, v):
+    def remove(self, item):
         for i in range(len(self.seq)):
-            if self.seq[i] == v:
+            if self.seq[i] == item:
                 del self.seq[i]
                 if i < self.ind:
                     self.ind -= 1
@@ -72,6 +72,7 @@ class Iterator:
     def remains(self):
         return len(self.seq) - self.ind

+
 class CyclicIterator:
     def __init__(self, seq, start=0):
         self.seq = seq
@@ -79,11 +80,11 @@ class CyclicIterator:
         self.count = len(seq)

     def next(self):
-        v = self.seq[self.ind]
+        item = self.seq[self.ind]
         self.ind += 1
         if self.ind == len(self.seq):
             self.ind = 0
-        return v
+        return item

     def copy(self):
         return CyclicIterator(self.seq, self.ind)
@@ -94,6 +95,7 @@ class CyclicIterator:
             idx -= len(self.seq)
         return self.seq[idx]

+
 if __name__ == "__main__":
     l = [1, 2, 4, 6]
     print "l=", l
...
@@ -22,6 +22,8 @@ along with PyCAM. If not, see <http://www.gnu.org/licenses/>.
 import locale
 import logging
+import re
+

 def get_logger(suffix=None):
     name = "PyCAM"
...
@@ -151,11 +151,13 @@ class ManagerInfo(object):
 def init_threading(number_of_processes=None, enable_server=False, remote=None,
         run_server=False, server_credentials="", local_port=DEFAULT_PORT):
-    global __multiprocessing, __num_of_processes, __manager, __closing, __task_source_uuid
+    global __multiprocessing, __num_of_processes, __manager, __closing, \
+            __task_source_uuid
     if __multiprocessing:
         # kill the manager and clean everything up for a re-initialization
         cleanup()
-    if (not is_windows_parallel_processing_available()) and (enable_server or run_server):
+    if (not is_windows_parallel_processing_available()) and \
+            (enable_server or run_server):
         # server mode is disabled for the Windows pyinstaller standalone
         # due to "pickle errors". How to reproduce: run the standalone binary
         # with "--enable-server --server-auth-key foo".
@@ -165,11 +167,11 @@ def init_threading(number_of_processes=None, enable_server=False, remote=None,
         if enable_server:
             log.warn("Unable to enable server mode with the Windows " \
                     + "standalone executable. " \
-                    + multiprocessing_missing_text)
+                    + server_mode_unavailable)
         elif run_server:
             log.warn("Unable to run in server-only mode with the Windows " \
                     + "standalone executable. " \
-                    + multiprocessing_missing_text)
+                    + server_mode_unavailable)
         else:
             # no further warnings required
             pass
@@ -217,30 +219,34 @@ def init_threading(number_of_processes=None, enable_server=False, remote=None,
     if number_of_processes is None:
         # use defaults
         # don't enable threading for a single cpu
-        if (multiprocessing.cpu_count() > 1) or remote or run_server or enable_server:
+        if (multiprocessing.cpu_count() > 1) or remote or run_server or \
+                enable_server:
             __multiprocessing = multiprocessing
             __num_of_processes = multiprocessing.cpu_count()
         else:
             __multiprocessing = False
-    elif (number_of_processes < 1) and (remote is None) and (enable_server is None):
-        # zero processes are allowed if we use a remote server or offer a server
+    elif (number_of_processes < 1) and (remote is None) and \
+            (enable_server is None):
+        # Zero processes are allowed if we use a remote server or offer a
+        # server.
         __multiprocessing = False
     else:
         __multiprocessing = multiprocessing
         __num_of_processes = number_of_processes
     # initialize the manager
     if not __multiprocessing:
-        __manager == None
+        __manager = None
         log.info("Disabled parallel processing")
     elif not enable_server and not run_server:
-        __manager == None
+        __manager = None
         log.info("Enabled %d parallel local processes" % __num_of_processes)
     else:
         # with multiprocessing
         log.info("Enabled %d parallel local processes" % __num_of_processes)
         log.info("Allow remote processing")
         # initialize the uuid list for all workers
-        worker_uuid_list = [str(uuid.uuid1()) for index in range(__num_of_processes)]
+        worker_uuid_list = [str(uuid.uuid1())
+                for index in range(__num_of_processes)]
         __task_source_uuid = str(uuid.uuid1())
         if remote is None:
             # try to guess an appropriate interface for binding
@@ -249,11 +255,13 @@ def init_threading(number_of_processes=None, enable_server=False, remote=None,
                 all_ips = pycam.Utils.get_all_ips()
                 if all_ips:
                     address = (all_ips[0], local_port)
-                    log.info("Binding to local interface with IP %s" % str(all_ips[0]))
+                    log.info("Binding to local interface with IP %s" % \
+                            str(all_ips[0]))
                 else:
                     return "Failed to find any local IP"
             else:
-                # empty hostname -> wildcard interface (does not work with windows)
+                # empty hostname -> wildcard interface
+                # (this does not work with Windows - see above)
                 address = ('', local_port)
         else:
             if ":" in remote:
@@ -261,8 +269,9 @@ def init_threading(number_of_processes=None, enable_server=False, remote=None,
                 try:
                     port = int(port)
                 except ValueError:
-                    log.warning(("Invalid port specified: '%s' - using default " \
-                            + "port (%d) instead") % (port, DEFAULT_PORT))
+                    log.warning(("Invalid port specified: '%s' - using " + \
+                            "default port (%d) instead") % \
+                            (port, DEFAULT_PORT))
                     port = DEFAULT_PORT
             else:
                 host = remote
@@ -274,12 +283,14 @@ def init_threading(number_of_processes=None, enable_server=False, remote=None,
             statistics = ProcessStatistics()
             cache = ProcessDataCache()
             pending_tasks = PendingTasks()
-            info = ManagerInfo(tasks_queue, results_queue, statistics, cache, pending_tasks)
+            info = ManagerInfo(tasks_queue, results_queue, statistics, cache,
+                    pending_tasks)
             TaskManager.register("tasks", callable=info.get_tasks_queue)
             TaskManager.register("results", callable=info.get_results_queue)
             TaskManager.register("statistics", callable=info.get_statistics)
             TaskManager.register("cache", callable=info.get_cache)
-            TaskManager.register("pending_tasks", callable=info.get_pending_tasks)
+            TaskManager.register("pending_tasks",
+                    callable=info.get_pending_tasks)
         else:
             TaskManager.register("tasks")
             TaskManager.register("results")
@@ -305,14 +316,16 @@ def init_threading(number_of_processes=None, enable_server=False, remote=None,
         __closing = __manager.Value("b", False)
         if __num_of_processes > 0:
             # only start the spawner, if we want to use local workers
-            spawner = __multiprocessing.Process(name="spawn", target=_spawn_daemon,
-                    args=(__manager, __num_of_processes, worker_uuid_list))
+            spawner = __multiprocessing.Process(name="spawn",
+                    target=_spawn_daemon, args=(__manager, __num_of_processes,
+                    worker_uuid_list))
             spawner.start()
         else:
             spawner = None
         # wait forever - in case of a server
         if run_server:
-            log.info("Running a local server and waiting for remote connections.")
+            log.info("Running a local server and waiting for remote " + \
+                    "connections.")
             # the server can be stopped via CTRL-C - it is caught later
             if not spawner is None:
                 spawner.join()
@@ -323,7 +336,7 @@ def cleanup():
         log.debug("Shutting down process handler")
         try:
             __closing.set(True)
-        except IOError, EOFError:
+        except (IOError, EOFError):
             log.debug("Connection to manager lost during cleanup")
         # Only managers that were started via ".start()" implement a "shutdown".
         # Managers started via ".connect" may skip this.
@@ -380,9 +393,9 @@ def _spawn_daemon(manager, number_of_processes, worker_uuid_list):
         # set the "closing" flag and just exit
         try:
             __closing.set(True)
-        except IOError, EOFError:
+        except (IOError, EOFError):
             pass
-    except IOError, EOFError:
+    except (IOError, EOFError):
         # the connection was closed
         log.info("Spawner daemon lost connection to server")
@@ -452,7 +465,8 @@ def _handle_tasks(tasks, results, stats, cache, pending_tasks, closing):
 def run_in_parallel_remote(func, args_list, unordered=False,
         disable_multiprocessing=False, callback=None):
-    global __multiprocessing, __num_of_processes, __manager, __task_source_uuid, __finished_jobs
+    global __multiprocessing, __num_of_processes, __manager, __task_source_uuid, \
+            __finished_jobs
     if __multiprocessing is None:
         # threading was not configured before
         init_threading()
@@ -475,7 +489,8 @@ def run_in_parallel_remote(func, args_list, unordered=False,
             if hasattr(arg, "uuid"):
                 data_uuid = ProcessDataCacheItemID(arg.uuid)
                 if not remote_cache.contains(data_uuid):
-                    log.debug("Adding cache item for job %s: %s - %s" % (job_id, arg.uuid, arg.__class__))
+                    log.debug("Adding cache item for job %s: %s - %s" % \
+                            (job_id, arg.uuid, arg.__class__))
                     remote_cache.add(data_uuid, arg)
                 result_args.append(data_uuid)
             elif isinstance(arg, (list, set, tuple)):
@@ -498,7 +513,8 @@ def run_in_parallel_remote(func, args_list, unordered=False,
             else:
                 result_args.append(arg)
             tasks_queue.put((job_id, index, func, result_args))
-            stats.add_queueing_time(__task_source_uuid, time.time() - start_time)
+            stats.add_queueing_time(__task_source_uuid,
+                    time.time() - start_time)
         log.debug("Added %d tasks for job %s" % (len(args_list), job_id))
         result_buffer = {}
         index = 0
...
@@ -20,7 +20,8 @@ You should have received a copy of the GNU General Public License
 along with PyCAM. If not, see <http://www.gnu.org/licenses/>.
 """

-__all__=["Cutters","Exporters","Geometry","Gui","Importers","PathGenerators","PathProcessors","Utils"]
+__all__ = ["Cutters", "Exporters", "Geometry", "Gui", "Importers",
+        "PathGenerators", "PathProcessors", "Utils"]

 VERSION = "0.4.1-svn"