Commit f84adff7 authored by sumpfralle

first batch of style fixes according to pylint (including minor typos)


git-svn-id: https://pycam.svn.sourceforge.net/svnroot/pycam/trunk@974 bbaffbd6-741e-11dd-a85d-61de82d9cad9
parent 7edff56e
@@ -55,7 +55,7 @@ import time
try:
import multiprocessing
except ImportError:
class multiprocessing:
class multiprocessing(object):
# use an arbitrary other Exception
AuthenticationError = socket.error
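
Editorial note: the hunk above replaces the old-style fallback class with a new-style one (pylint's old-style-class warning). A minimal, self-contained sketch of that fallback pattern, assuming only that socket is imported by the surrounding module:

    import socket

    try:
        import multiprocessing
    except ImportError:
        # new-style stub that only provides the attribute later code touches;
        # an arbitrary existing exception stands in for AuthenticationError
        class multiprocessing(object):
            AuthenticationError = socket.error
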
@@ -89,7 +89,8 @@ def show_gui(inputfile=None, task_settings_file=None):
full_report.append("Details:")
full_report.append(report_gtk)
full_report.append("")
full_report.append("Detailed list of requirements: %s" % GuiCommon.REQUIREMENTS_LINK)
full_report.append("Detailed list of requirements: %s" % \
GuiCommon.REQUIREMENTS_LINK)
log.critical(os.linesep.join(full_report))
return EXIT_CODES["requirements"]
@@ -134,7 +135,7 @@ def load_model_file(filename, program_locations, unit=None):
if not os.path.isfile(filename):
log.warn("The input file ('%s') was not found!" % filename)
return None
filetype, importer = pycam.Importers.detect_file_type(filename)
importer = pycam.Importers.detect_file_type(filename)[1]
model = importer(filename, program_locations=program_locations, unit=unit)
if model is None:
log.warn("Failed to load the model file (%s)." % filename)
@@ -180,13 +181,14 @@ def execute(parser, opts, args, pycam):
# print only the bare version number
print VERSION
else:
print "PyCAM %s" % VERSION
print "Copyright (C) 2008-2010 Lode Leroy"
print "Copyright (C) 2010 Lars Kruse"
print
print "License GPLv3+: GNU GPL version 3 or later <http://gnu.org/licenses/gpl.html>."
print "This is free software: you are free to change and redistribute it."
print "There is NO WARRANTY, to the extent permitted by law."
text = '''PyCAM %s
Copyright (C) 2008-2010 Lode Leroy
Copyright (C) 2010-2011 Lars Kruse
License GPLv3+: GNU GPL version 3 or later <http://gnu.org/licenses/gpl.html>.
This is free software: you are free to change and redistribute it.
There is NO WARRANTY, to the extent permitted by law.''' % VERSION
print text
return EXIT_CODES["ok"]
if not opts.disable_psyco:
@@ -271,7 +273,7 @@ def execute(parser, opts, args, pycam):
elif opts.support_type == "none":
pass
else:
raise NotImplemented, "Invalid support type specified: %s" % \
raise NotImplementedError, "Invalid support type specified: %s" % \
opts.support_type
if opts.collision_engine == "ode":
tps.set_calculation_backend("ODE")
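
Editorial note: the NotImplemented fix above is more than style. NotImplemented is the sentinel value returned by rich comparisons, not an exception class, so raising it fails (under Python 2 with a confusing TypeError). A small sketch of the corrected idiom; apart from "none", which appears in the hunk, the accepted values are assumptions:

    def check_support_type(support_type):
        # hypothetical helper illustrating the corrected raise
        if support_type not in ("grid", "none"):
            raise NotImplementedError(
                    "Invalid support type specified: %s" % support_type)

    check_support_type("none")    # passes silently
    # check_support_type("foo")   # would raise NotImplementedError
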
@@ -413,7 +415,7 @@ def execute(parser, opts, args, pycam):
handler, closer = get_output_handler(opts.export_task_config)
if handler is None:
return EXIT_CODES["write_output_failed"]
print >>handler, tps.get_string()
print >> handler, tps.get_string()
closer()
# no error -> don't return a specific exit code
return None
@@ -434,8 +436,8 @@ if __name__ == "__main__":
+ "in batch mode. Most parameters are useful only for " \
+ "batch mode.",
epilog="Take a look at the wiki for more information: " \
+ "http://sourceforge.net/apps/mediawiki/pycam/.\n" \
+ "Bug reports: http://sourceforge.net/tracker/?group_id=237831&atid=1104176")
+ "http://sourceforge.net/apps/mediawiki/pycam/.\nBug reports" \
+ ": http://sf.net/tracker/?group_id=237831&atid=1104176")
group_general = parser.add_option_group("General options")
group_export = parser.add_option_group("Export formats",
"Export the resulting toolpath or meta-data in various formats. " \
@@ -512,8 +514,8 @@ if __name__ == "__main__":
+ "connecting to a remote server or for granting access " \
+ "to remote clients.")
group_general.add_option("-q", "--quiet", dest="quiet",
default=False, action="store_true", help="output only warnings and " \
+ "errors.")
default=False, action="store_true", help="output only warnings " \
+ "and errors.")
group_general.add_option("-d", "--debug", dest="debug",
default=False, action="store_true", help="enable output of debug " \
+ "messages.")
@@ -27,56 +27,56 @@ from pycam.Toolpath import simplify_toolpath
class ContourCutter(pycam.PathProcessors.BasePathProcessor):
def __init__(self, reverse=False):
self.paths = []
super(ContourCutter, self).__init__()
self.curr_path = None
self.scanline = None
self.pe = None
self.polygon_extractor = None
self.points = []
self.reverse = reverse
self.__forward = Point(1, 1, 0)
def append(self, p):
def append(self, point):
# Sort the points in positive x/y direction - otherwise the
# PolygonExtractor breaks.
if self.points and (p.sub(self.points[0]).dot(self.__forward) < 0):
self.points.insert(0, p)
if self.points and (point.sub(self.points[0]).dot(self.__forward) < 0):
self.points.insert(0, point)
else:
self.points.append(p)
self.points.append(point)
def new_direction(self, direction):
if self.pe == None:
self.pe = PolygonExtractor(PolygonExtractor.CONTOUR)
if self.polygon_extractor == None:
self.polygon_extractor = PolygonExtractor(PolygonExtractor.CONTOUR)
self.pe.new_direction(direction)
self.polygon_extractor.new_direction(direction)
def end_direction(self):
self.pe.end_direction()
self.polygon_extractor.end_direction()
def new_scanline(self):
self.pe.new_scanline()
self.polygon_extractor.new_scanline()
self.points = []
def end_scanline(self):
for i in range(1, len(self.points)-1):
self.pe.append(self.points[i])
self.pe.end_scanline()
for i in range(1, len(self.points) - 1):
self.polygon_extractor.append(self.points[i])
self.polygon_extractor.end_scanline()
def finish(self):
self.pe.finish()
if self.pe.merge_path_list:
paths = self.pe.merge_path_list
elif self.pe.hor_path_list:
paths = self.pe.hor_path_list
self.polygon_extractor.finish()
if self.polygon_extractor.merge_path_list:
paths = self.polygon_extractor.merge_path_list
elif self.polygon_extractor.hor_path_list:
paths = self.polygon_extractor.hor_path_list
else:
paths = self.pe.ver_path_list
paths = self.polygon_extractor.ver_path_list
if paths:
for p in paths:
p.append(p.points[0])
simplify_toolpath(p)
for path in paths:
path.append(path.points[0])
simplify_toolpath(path)
if paths:
if self.reverse:
paths.reverse()
self.paths.extend(paths)
self.sort_layered()
self.pe = None
self.polygon_extractor = None
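
Editorial note: besides renaming self.pe to self.polygon_extractor, the ContourCutter hunk adds a super().__init__() call and declares every instance attribute inside __init__(), which addresses pylint's attribute-defined-outside-init warning. A minimal sketch of the pattern (the class names here are illustrative, not the real PyCAM hierarchy):

    class BaseProcessor(object):
        def __init__(self):
            self.paths = []

    class ContourLikeCutter(BaseProcessor):
        def __init__(self, reverse=False):
            super(ContourLikeCutter, self).__init__()
            # every attribute used later is declared here, even if only as None
            self.curr_path = None
            self.scanline = None
            self.polygon_extractor = None
            self.reverse = reverse
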
@@ -28,19 +28,19 @@ from pycam.Geometry.Path import Path
class PathAccumulator(pycam.PathProcessors.BasePathProcessor):
def __init__(self, zigzag=False, reverse=False):
self.paths = []
super(PathAccumulator, self).__init__()
self.curr_path = None
self.zigzag = zigzag
self.scanline = None
self.reverse = reverse
def append(self, p):
def append(self, point):
if self.curr_path == None:
self.curr_path = Path()
if self.reverse:
self.curr_path.insert(0, p)
self.curr_path.insert(0, point)
else:
self.curr_path.append(p)
self.curr_path.append(point)
def new_direction(self, direction):
self.scanline = 0
@@ -29,51 +29,51 @@ from pycam.Toolpath import simplify_toolpath
class PolygonCutter(pycam.PathProcessors.BasePathProcessor):
def __init__(self, reverse=False):
self.paths = []
super(PolygonCutter, self).__init__()
self.curr_path = None
self.scanline = None
self.pe = PolygonExtractor(PolygonExtractor.MONOTONE)
self.poly_extractor = PolygonExtractor(PolygonExtractor.MONOTONE)
self.reverse = reverse
def append(self, p):
self.pe.append(p)
def append(self, point):
self.poly_extractor.append(point)
def new_direction(self, direction):
self.pe.new_direction(direction)
self.poly_extractor.new_direction(direction)
def end_direction(self):
self.pe.end_direction()
self.poly_extractor.end_direction()
def new_scanline(self):
self.pe.new_scanline()
self.poly_extractor.new_scanline()
def end_scanline(self):
self.pe.end_scanline()
self.poly_extractor.end_scanline()
def finish(self):
self.pe.finish()
self.poly_extractor.finish()
paths = []
source_paths = []
if self.pe.hor_path_list:
source_paths.extend(self.pe.hor_path_list)
if self.pe.ver_path_list:
source_paths.extend(self.pe.ver_path_list)
if self.poly_extractor.hor_path_list:
source_paths.extend(self.poly_extractor.hor_path_list)
if self.poly_extractor.ver_path_list:
source_paths.extend(self.poly_extractor.ver_path_list)
for path in source_paths:
points = path.points
for i in range(0, (len(points)+1)/2):
p = Path()
new_path = Path()
if i % 2 == 0:
p.append(points[i])
p.append(points[-i-1])
new_path.append(points[i])
new_path.append(points[-i-1])
else:
p.append(points[-i-1])
p.append(points[i])
paths.append(p)
new_path.append(points[-i-1])
new_path.append(points[i])
paths.append(new_path)
if paths:
for p in paths:
simplify_toolpath(p)
for path in paths:
simplify_toolpath(path)
if self.reverse:
p.reverse()
path.reverse()
self.paths.extend(paths)
self.sort_layered()
@@ -26,11 +26,11 @@ from pycam.Toolpath import simplify_toolpath
class SimpleCutter(pycam.PathProcessors.BasePathProcessor):
def __init__(self, reverse=False):
self.paths = []
super(SimpleCutter, self).__init__()
self.curr_path = None
self.reverse = reverse
def append(self, p):
def append(self, point):
curr_path = None
if self.curr_path == None:
curr_path = Path()
@@ -38,7 +38,7 @@ class SimpleCutter(pycam.PathProcessors.BasePathProcessor):
else:
curr_path = self.curr_path
self.curr_path = None
curr_path.append(p)
curr_path.append(point)
if self.curr_path == None:
simplify_toolpath(curr_path)
if self.reverse:
@@ -26,13 +26,13 @@ from pycam.Toolpath import simplify_toolpath
class ZigZagCutter(pycam.PathProcessors.BasePathProcessor):
def __init__(self, reverse=False):
self.paths = []
super(ZigZagCutter, self).__init__()
self.curr_path = None
self.scanline = None
self.curr_scanline = None
self.reverse = reverse
def append(self, p):
def append(self, point):
curr_path = None
if self.curr_path == None:
curr_path = Path()
@@ -41,7 +41,7 @@ class ZigZagCutter(pycam.PathProcessors.BasePathProcessor):
curr_path = self.curr_path
self.curr_path = None
curr_path.append(p)
curr_path.append(point)
if self.curr_path == None:
if (self.scanline % 2) == 0:
@@ -26,6 +26,9 @@ __all__ = ["PathAccumulator", "SimpleCutter", "ZigZagCutter", "PolygonCutter",
class BasePathProcessor(object):
def __init__(self):
self.paths = []
def new_direction(self, direction):
pass
@@ -37,9 +40,11 @@ class BasePathProcessor(object):
def sort_layered(self, upper_first=True):
if upper_first:
compare_height = lambda path1, path2: path1.points[0].z < path2.points[0].z
compare_height = lambda path1, path2: \
path1.points[0].z < path2.points[0].z
else:
compare_height = lambda path1, path2: path1.points[0].z > path2.points[0].z
compare_height = lambda path1, path2: \
path1.points[0].z > path2.points[0].z
finished = False
while not finished:
index = 0
@@ -68,7 +68,8 @@ def convert_triangles_to_vertices_faces(triangles):
id_index_map = {}
for t in triangles:
coords = []
# TODO: check if we need to change the order of points for non-AOI models as well
# TODO: check if we need to change the order of points for non-AOI
# models as well.
for p in (t.p1, t.p3, t.p2):
# add the point to the id/index mapping, if necessary
if not id_index_map.has_key(p.id):
@@ -199,7 +200,7 @@ class PhysicalWorld(object):
http://sourceforge.net/tracker/index.php?func=detail&aid=2973876&group_id=24884&atid=382799
"""
minz, maxz = geom.getAABB()[-2:]
currx, curry, currz = geom.getPosition()
currx, curry = geom.getPosition()[0:2]
ray = ode.GeomRay(self._space, maxz-minz)
ray.set((currx, curry, maxz), (0.0, 0.0, -1.0))
return [ray]
@@ -67,14 +67,14 @@ def get_all_ips():
ips = socket.gethostbyname_ex(name)
if len(ips) == 3:
return ips[2]
except socket.gaiaerror:
except socket.gaierror:
return []
result.extend(get_ips_of_name(socket.gethostname()))
result.extend(get_ips_of_name("localhost"))
filtered_result = []
for ip in result:
if not ip in filtered_result:
filtered_result.append(ip)
for one_ip in result:
if not one_ip in filtered_result:
filtered_result.append(one_ip)
def sort_ip_by_relevance(ip1, ip2):
if ip1.startswith("127."):
return 1
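
Editorial note: the change from socket.gaiaerror to socket.gaierror above fixes a genuine typo. The mistyped attribute would itself blow up with an AttributeError the moment name resolution failed; socket.gaierror is what gethostbyname_ex() raises for unresolvable names. A small sketch of the corrected lookup helper:

    import socket

    def get_ips_of_name(name):
        # returns the address list, or [] if the name cannot be resolved
        try:
            return socket.gethostbyname_ex(name)[2]
        except socket.gaierror:
            return []

    # get_ips_of_name("localhost") -> e.g. ["127.0.0.1"]
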
@@ -96,7 +96,7 @@ def get_external_program_location(key):
# check the windows path via win32api
try:
import win32api
handle, location = win32api.FindExecutable(key)
location = win32api.FindExecutable(key)[1]
if location:
return location
except:
@@ -114,7 +114,8 @@ def get_external_program_location(key):
# do a manual scan in the programs directory (only for windows)
try:
from win32com.shell import shellcon, shell
program_dir = shell.SHGetFolderPath(0, shellcon.CSIDL_PROGRAM_FILES, 0, 0)
program_dir = shell.SHGetFolderPath(0, shellcon.CSIDL_PROGRAM_FILES,
0, 0)
except ImportError:
# no other options for non-windows systems
return None
@@ -29,26 +29,26 @@ class Iterator:
if self.ind >= len(self.seq):
return None
else:
v = self.seq[self.ind]
item = self.seq[self.ind]
self.ind += 1
return v
return item
def insertBefore(self, v):
self.seq.insert(self.ind - 1, v)
def insertBefore(self, item):
self.seq.insert(self.ind - 1, item)
self.ind += 1
def insert(self, v):
self.seq.insert(self.ind, v)
def insert(self, item):
self.seq.insert(self.ind, item)
self.ind += 1
def replace(self, v, w):
def replace(self, item_old, item_new):
for i in range(len(self.seq)):
if self.seq[i] == v:
self.seq[i] = w
if self.seq[i] == item_old:
self.seq[i] = item_new
def remove(self, v):
def remove(self, item):
for i in range(len(self.seq)):
if self.seq[i] == v:
if self.seq[i] == item:
del self.seq[i]
if i < self.ind:
self.ind -= 1
@@ -72,6 +72,7 @@ class Iterator:
def remains(self):
return len(self.seq) - self.ind
class CyclicIterator:
def __init__(self, seq, start=0):
self.seq = seq
@@ -79,11 +80,11 @@ class CyclicIterator:
self.count = len(seq)
def next(self):
v = self.seq[self.ind]
item = self.seq[self.ind]
self.ind += 1
if self.ind == len(self.seq):
self.ind = 0
return v
return item
def copy(self):
return CyclicIterator(self.seq, self.ind)
@@ -94,6 +95,7 @@ class CyclicIterator:
idx -= len(self.seq)
return self.seq[idx]
if __name__ == "__main__":
l = [1, 2, 4, 6]
print "l=", l
@@ -22,6 +22,8 @@ along with PyCAM. If not, see <http://www.gnu.org/licenses/>.
import locale
import logging
import re
def get_logger(suffix=None):
name = "PyCAM"
@@ -151,11 +151,13 @@ class ManagerInfo(object):
def init_threading(number_of_processes=None, enable_server=False, remote=None,
run_server=False, server_credentials="", local_port=DEFAULT_PORT):
global __multiprocessing, __num_of_processes, __manager, __closing, __task_source_uuid
global __multiprocessing, __num_of_processes, __manager, __closing,
__task_source_uuid
if __multiprocessing:
# kill the manager and clean everything up for a re-initialization
cleanup()
if (not is_windows_parallel_processing_available()) and (enable_server or run_server):
if (not is_windows_parallel_processing_available()) and \
(enable_server or run_server):
# server mode is disabled for the Windows pyinstaller standalone
# due to "pickle errors". How to reproduce: run the standalone binary
# with "--enable-server --server-auth-key foo".
@@ -165,11 +167,11 @@ def init_threading(number_of_processes=None, enable_server=False, remote=None,
if enable_server:
log.warn("Unable to enable server mode with the Windows " \
+ "standalone executable. " \
+ multiprocessing_missing_text)
+ server_mode_unavailable)
elif run_server:
log.warn("Unable to run in server-only mode with the Windows " \
+ "standalone executable. " \
+ multiprocessing_missing_text)
+ server_mode_unavailable)
else:
# no further warnings required
pass
@@ -217,30 +219,34 @@ def init_threading(number_of_processes=None, enable_server=False, remote=None,
if number_of_processes is None:
# use defaults
# don't enable threading for a single cpu
if (multiprocessing.cpu_count() > 1) or remote or run_server or enable_server:
if (multiprocessing.cpu_count() > 1) or remote or run_server or \
enable_server:
__multiprocessing = multiprocessing
__num_of_processes = multiprocessing.cpu_count()
else:
__multiprocessing = False
elif (number_of_processes < 1) and (remote is None) and (enable_server is None):
# zero processes are allowed if we use a remote server or offer a server
elif (number_of_processes < 1) and (remote is None) and \
(enable_server is None):
# Zero processes are allowed if we use a remote server or offer a
# server.
__multiprocessing = False
else:
__multiprocessing = multiprocessing
__num_of_processes = number_of_processes
# initialize the manager
if not __multiprocessing:
__manager == None
__manager = None
log.info("Disabled parallel processing")
elif not enable_server and not run_server:
__manager == None
__manager = None
log.info("Enabled %d parallel local processes" % __num_of_processes)
else:
# with multiprocessing
log.info("Enabled %d parallel local processes" % __num_of_processes)
log.info("Allow remote processing")
# initialize the uuid list for all workers
worker_uuid_list = [str(uuid.uuid1()) for index in range(__num_of_processes)]
worker_uuid_list = [str(uuid.uuid1())
for index in range(__num_of_processes)]
__task_source_uuid = str(uuid.uuid1())
if remote is None:
# try to guess an appropriate interface for binding
@@ -249,11 +255,13 @@ def init_threading(number_of_processes=None, enable_server=False, remote=None,
all_ips = pycam.Utils.get_all_ips()
if all_ips:
address = (all_ips[0], local_port)
log.info("Binding to local interface with IP %s" % str(all_ips[0]))
log.info("Binding to local interface with IP %s" % \
str(all_ips[0]))
else:
return "Failed to find any local IP"
else:
# empty hostname -> wildcard interface (does not work with windows)
# empty hostname -> wildcard interface
# (this does not work with Windows - see above)
address = ('', local_port)
else:
if ":" in remote:
@@ -261,8 +269,9 @@ def init_threading(number_of_processes=None, enable_server=False, remote=None,
try:
port = int(port)
except ValueError:
log.warning(("Invalid port specified: '%s' - using default " \
+ "port (%d) instead") % (port, DEFAULT_PORT))
log.warning(("Invalid port specified: '%s' - using " + \
"default port (%d) instead") % \
(port, DEFAULT_PORT))
port = DEFAULT_PORT
else:
host = remote
@@ -274,12 +283,14 @@ def init_threading(number_of_processes=None, enable_server=False, remote=None,
statistics = ProcessStatistics()
cache = ProcessDataCache()
pending_tasks = PendingTasks()
info = ManagerInfo(tasks_queue, results_queue, statistics, cache, pending_tasks)
info = ManagerInfo(tasks_queue, results_queue, statistics, cache,
pending_tasks)
TaskManager.register("tasks", callable=info.get_tasks_queue)
TaskManager.register("results", callable=info.get_results_queue)
TaskManager.register("statistics", callable=info.get_statistics)
TaskManager.register("cache", callable=info.get_cache)
TaskManager.register("pending_tasks", callable=info.get_pending_tasks)
TaskManager.register("pending_tasks",
callable=info.get_pending_tasks)
else:
TaskManager.register("tasks")
TaskManager.register("results")
@@ -305,14 +316,16 @@ def init_threading(number_of_processes=None, enable_server=False, remote=None,
__closing = __manager.Value("b", False)
if __num_of_processes > 0:
# only start the spawner, if we want to use local workers
spawner = __multiprocessing.Process(name="spawn", target=_spawn_daemon,
args=(__manager, __num_of_processes, worker_uuid_list))
spawner = __multiprocessing.Process(name="spawn",
target=_spawn_daemon, args=(__manager, __num_of_processes,
worker_uuid_list))
spawner.start()
else:
spawner = None
# wait forever - in case of a server
if run_server:
log.info("Running a local server and waiting for remote connections.")
log.info("Running a local server and waiting for remote " + \
"connections.")
# the server can be stopped via CTRL-C - it is caught later
if not spawner is None:
spawner.join()
@@ -323,7 +336,7 @@ def cleanup():
log.debug("Shutting down process handler")
try:
__closing.set(True)
except IOError, EOFError:
except (IOError, EOFError):
log.debug("Connection to manager lost during cleanup")
# Only managers that were started via ".start()" implement a "shutdown".
# Managers started via ".connect" may skip this.
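
Editorial note: the except fix above changes behaviour, not just style. In Python 2, "except IOError, EOFError:" catches only IOError and rebinds the name EOFError to the caught exception instance, whereas the parenthesized tuple catches both types. A minimal sketch of the corrected form:

    def read_all(stream):
        # hedged illustration of the corrected "except" form
        try:
            return stream.read()
        except (IOError, EOFError):
            # the tuple catches both types; the old "except IOError, EOFError:"
            # caught only IOError and rebound the name EOFError
            return None

    # read_all(open("/etc/hostname")) returns the file content, or None on error
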
@@ -380,9 +393,9 @@ def _spawn_daemon(manager, number_of_processes, worker_uuid_list):
# set the "closing" flag and just exit
try:
__closing.set(True)
except IOError, EOFError:
except (IOError, EOFError):
pass
except IOError, EOFError:
except (IOError, EOFError):
# the connection was closed
log.info("Spawner daemon lost connection to server")
@@ -452,7 +465,8 @@ def _handle_tasks(tasks, results, stats, cache, pending_tasks, closing):
def run_in_parallel_remote(func, args_list, unordered=False,
disable_multiprocessing=False, callback=None):
global __multiprocessing, __num_of_processes, __manager, __task_source_uuid, __finished_jobs
global __multiprocessing, __num_of_processes, __manager, __task_source_uuid,
__finished_jobs
if __multiprocessing is None:
# threading was not configured before
init_threading()
@@ -475,7 +489,8 @@ def run_in_parallel_remote(func, args_list, unordered=False,
if hasattr(arg, "uuid"):
data_uuid = ProcessDataCacheItemID(arg.uuid)
if not remote_cache.contains(data_uuid):
log.debug("Adding cache item for job %s: %s - %s" % (job_id, arg.uuid, arg.__class__))
log.debug("Adding cache item for job %s: %s - %s" % \
(job_id, arg.uuid, arg.__class__))
remote_cache.add(data_uuid, arg)
result_args.append(data_uuid)
elif isinstance(arg, (list, set, tuple)):
@@ -498,7 +513,8 @@ def run_in_parallel_remote(func, args_list, unordered=False,
else:
result_args.append(arg)
tasks_queue.put((job_id, index, func, result_args))
stats.add_queueing_time(__task_source_uuid, time.time() - start_time)
stats.add_queueing_time(__task_source_uuid,
time.time() - start_time)
log.debug("Added %d tasks for job %s" % (len(args_list), job_id))
result_buffer = {}
index = 0
@@ -20,7 +20,8 @@ You should have received a copy of the GNU General Public License
along with PyCAM. If not, see <http://www.gnu.org/licenses/>.
"""
__all__=["Cutters","Exporters","Geometry","Gui","Importers","PathGenerators","PathProcessors","Utils"]
__all__ = ["Cutters", "Exporters", "Geometry", "Gui", "Importers",
"PathGenerators", "PathProcessors", "Utils"]
VERSION = "0.4.1-svn"