Attached is the revised patch.  It differs from the previous one in two ways:

1) it is a single patch file rather than multiple files, and
2) it fixes one typo found during further testing of piuparts-master and piuparts-slave.
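
For context, here is a minimal sketch (not part of the patch, names are made up)
of the naming pattern these changes follow: renaming local variables such as
'list', 'dict', 'file', 'str' and 'dir' so the Python builtins of the same name
stay usable in the same scope.

    # hypothetical example, not from piuparts: renaming avoids shadowing builtins
    def count_unique(items):
        vlist = sorted(set(items))     # 'vlist' instead of 'list', builtin untouched
        return list(enumerate(vlist))  # builtin 'list' is still callable here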

diff --git a/piuparts-report.py b/piuparts-report.py
--- a/piuparts-report.py
+++ b/piuparts-report.py
@@ -421,13 +421,13 @@
         logger.addHandler(handler)
 
 
-def html_protect(str):
-    str = "&".join(str.split("&"))
-    str = "&lt;".join(str.split("<"))
-    str = "&gt;".join(str.split(">"))
-    str = "&#34;".join(str.split('"'))
-    str = "&#39;".join(str.split("'"))
-    return str
+def html_protect(vstr):
+    vstr = "&amp;".join(str.split("&"))
+    vstr = "&lt;".join(str.split("<"))
+    vstr = "&gt;".join(str.split(">"))
+    vstr = "&#34;".join(str.split('"'))
+    vstr = "&#39;".join(str.split("'"))
+    return vstr
 
 
 def emphasize_reason(reason):
@@ -457,8 +457,8 @@
             files += [os.path.join(dir,subdir, name_in_subdir)]
     # sort by age
     content = {}
-    for file in files:
-      content[file] = os.path.getmtime(os.path.join(dir,file))
+    for vfile in files:
+      content[vfile] = os.path.getmtime(os.path.join(dir,vfile))
     # Sort keys, based on time stamps
     files = content.keys()
     files.sort(lambda x,y: cmp(content[x],content[y]))
@@ -474,21 +474,21 @@
 
 
 def copy_logs(logs_by_dir, output_dir):
-    for dir in logs_by_dir:
-        fulldir = os.path.join(output_dir, dir)
+    for vdir in logs_by_dir:
+        fulldir = os.path.join(output_dir, vdir)
         if not os.path.exists(fulldir):
             os.makedirs(fulldir)
-        for basename in logs_by_dir[dir]:
-            source = os.path.join(dir, basename)
+        for basename in logs_by_dir[vdir]:
+            source = os.path.join(vdir, basename)
             target = os.path.join(fulldir, basename)
             update_file(source, target)
 
 def remove_old_logs(logs_by_dir, output_dir):
-    for dir in logs_by_dir:
-        fulldir = os.path.join(output_dir, dir)
+    for vdir in logs_by_dir:
+        fulldir = os.path.join(output_dir, vdir)
         if os.path.exists(fulldir):
             for basename in os.listdir(fulldir):
-                if basename not in logs_by_dir[dir]:
+                if basename not in logs_by_dir[vdir]:
                     os.remove(os.path.join(fulldir, basename))
 
 
@@ -600,28 +600,28 @@
 
 
     def print_by_dir(self, output_directory, logs_by_dir):
-        for dir in logs_by_dir:
-            list = []
-            for basename in logs_by_dir[dir]:
+        for vdir in logs_by_dir:
+            vlist = []
+            for basename in logs_by_dir[vdir]:
                 assert basename.endswith(".log")
                 assert "_" in basename
                 package, version = basename[:-len(".log")].split("_")
-                list.append((os.path.join(dir, basename), package, version))
-            self.write_log_list_page(os.path.join(output_directory, dir + ".html"),
-                                title_by_dir[dir], 
-                                desc_by_dir[dir], list)
+                vlist.append((os.path.join(vdir, basename), package, version))
+            self.write_log_list_page(os.path.join(output_directory, vdir + ".html"),
+                                title_by_dir[vdir], 
+                                desc_by_dir[vdir], vlist)
 
     def find_links_to_logs(self, package_name, dirs, logs_by_dir):
         links = []
-        for dir in dirs:
-          if dir == "fail":
+        for vdir in dirs:
+          if vdir == "fail":
             style = " class=\"needs-bugging\""
           else:
             style = ""
-          for basename in logs_by_dir[dir]:
+          for basename in logs_by_dir[vdir]:
             if basename.startswith(package_name+"_") and basename.endswith(".log"):
               package, version = basename[:-len(".log")].split("_")
-              links.append("<a href=\"/%s\"%s>%s</a>" % (os.path.join(self._config.section, dir, basename),style,html_protect(version)))
+              links.append("<a href=\"/%s\"%s>%s</a>" % (os.path.join(self._config.section, vdir, basename),style,html_protect(version)))
         return links
 
     def link_to_maintainer_summary(self, maintainer):
@@ -945,9 +945,9 @@
         for state in self._binary_db.get_states():
             dir_link = ""
             analysis = ""
-            for dir in dirs:
-              if dir in ("pass","fail","bugged") and state_by_dir[dir] == state:
-                dir_link += "<a href='%s.html'>%s</a> logs<br>" % (dir, html_protect(dir))
+            for vdir in dirs:
+              if vdir in ("pass","fail","bugged") and state_by_dir[vdir] == state:
+                dir_link += "<a href='%s.html'>%s</a> logs<br>" % (vdir, html_protect(vdir))
             if state in ("successfully-tested", "failed-testing"):
               analysis = self.create_and_link_to_analysises(state)
             tablerows += ("<tr class=\"normalrow\"><td class=\"contentcell2\"><a href='state-%s.html'>%s</a>%s</td>" +
@@ -974,20 +974,20 @@
     def write_state_pages(self):
         for state in self._binary_db.get_states():
             logging.debug("Writing page for %s" % state)
-            list = ""
+            vlist = ""
             for package in self._binary_db.get_packages_in_state(state):
-                list += "<li id=\"%s\">%s (%s)" % (
+                vlist += "<li id=\"%s\">%s (%s)" % (
                                          package["Package"],
                                          self.link_to_source_summary(package["Package"]),
                                          html_protect(package["Maintainer"]))
                 if package.dependencies():
-                    list += "\n<ul>\n"
+                    vlist += "\n<ul>\n"
                     for dep in package.dependencies():
-                        list += "<li>dependency %s is %s</li>\n" % \
+                        vlist += "<li>dependency %s is %s</li>\n" % \
                                   (self.link_to_state_page(self._config.section,dep,dep), 
                                   emphasize_reason(html_protect(self._binary_db.state_by_name(dep))))
-                    list += "</ul>\n"
-                list += "</li>\n"
+                    vlist += "</ul>\n"
+                vlist += "</li>\n"
             htmlpage = string.Template(HTML_HEADER + STATE_BODY_TEMPLATE + HTML_FOOTER)
             write_file(os.path.join(self._output_directory, "state-%s.html" % state), htmlpage.safe_substitute( {
                                         "page_title": html_protect("Packages in state "+state+" in "+self._config.section),
@@ -995,15 +995,15 @@
                                         "time": time.strftime("%Y-%m-%d %H:%M %Z"),
                                         "state": html_protect(state),
                                         "section": html_protect(self._config.section),
-                                        "list": list
+                                        "list": vlist
                                        }))
 
     def generate_html(self):
         logging.debug("Finding log files")
         dirs = ["pass", "fail", "bugged", "reserved", "untestable"]
         logs_by_dir = {}
-        for dir in dirs:
-            logs_by_dir[dir] = find_files_with_suffix(dir, ".log")
+        for vdir in dirs:
+            logs_by_dir[vdir] = find_files_with_suffix(vdir, ".log")
 
         logging.debug("Copying log files")
         copy_logs(logs_by_dir, self._output_directory)
diff --git a/piuparts-slave.py b/piuparts-slave.py
--- a/piuparts-slave.py
+++ b/piuparts-slave.py
@@ -201,12 +201,12 @@
         create_file(self._reserved_filename(name, version), "")
 
     def get_reserved(self):
-        list = []
+        vlist = []
         for basename in os.listdir("reserved"):
             if "_" in basename and basename.endswith(".log"):
                 name, version = basename[:-len(".log")].split("_", 1)
-                list.append((name, version))
-        return list
+                vlist.append((name, version))
+        return vlist
 
     def forget_reserved(self, name, version):
         try:
@@ -243,10 +243,10 @@
             create_chroot(self._config, self._config["upgrade-test-chroot-tgz"], 
                         self._config["upgrade-test-distros"].split()[0])
     
-        for dir in ["new", "pass", "fail"]:
-            dir = os.path.join(self._slave_directory, dir)
-            if not os.path.exists(os.path.join(self._slave_directory, dir)):
-                os.mkdir(dir)
+        for rdir in ["new", "pass", "fail"]:
+            rdir = os.path.join(self._slave_directory, rdir)
+            if not os.path.exists(rdir):
+                os.mkdir(rdir)
 
         self._slave = Slave()
         self._slave.set_master_host(master_host)
@@ -256,10 +256,10 @@
         self._idle_sleep=idle_sleep
         self._log_file=self._config["log-file"]
 
-        for dir in ["pass", "fail", "untestable", "reserved"]:
-            dir = os.path.join(self._slave_directory, dir)
-            if not os.path.exists(dir):
-                os.makedirs(dir)
+        for rdir in ["pass", "fail", "untestable", "reserved"]:
+            rdir = os.path.join(self._slave_directory, rdir)
+            if not os.path.exists(rdir):
+                os.makedirs(rdir)
         os.chdir(oldcwd)
 
     def run(self):
@@ -422,8 +422,8 @@
     arch = config["arch"]
     if not arch:
         # Try to figure it out ourselves, using dpkg
-        input, output = os.popen2("dpkg --print-architecture")
-        arch = output.read().rstrip()
+        vin, vout = os.popen2("dpkg --print-architecture")
+        arch = vout.read().rstrip()
     packages_url = \
         "%s/dists/%s/main/binary-%s/Packages.bz2" % (mirror, distro, arch)
 
diff --git a/piuparts.py b/piuparts.py
--- a/piuparts.py
+++ b/piuparts.py
@@ -285,10 +285,10 @@
 
 def do_on_panic(hook):
     global counter
-    id = counter
+    cid = counter
     counter += 1
-    on_panic_hooks[id] = hook
-    return id
+    on_panic_hooks[cid] = hook
+    return cid
 
 
 def dont_do_on_panic(id):
@@ -303,9 +303,9 @@
 
     def formatTime(self, record, datefmt):
         t = time.time() - self.startup_time
-        min = int(t / 60)
-        s = t % 60.0
-        return "%dm%.1fs" % (min, s)
+        t_min = int(t / 60)
+        t_sec = t % 60.0
+        return "%dm%.1fs" % (t_min, t_sec)
 
 
 DUMP = logging.DEBUG - 1
@@ -377,7 +377,7 @@
     env = os.environ.copy()
     env["LC_ALL"] = "C"
     env["LANGUAGES"] = ""
-    env["PIUPARTS_OBJECTS"] = ' '.join(str(object) for object in settings.testobjects )
+    env["PIUPARTS_OBJECTS"] = ' '.join(str(vobject) for vobject in settings.testobjects )
     p = subprocess.Popen(command, env=env, stdin=subprocess.PIPE, 
                          stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
     (output, _) = p.communicate()
@@ -582,7 +582,7 @@
     def create(self):
         """Create a chroot according to user's wishes."""
         self.create_temp_dir()
-        id = do_on_panic(self.remove)
+        cid = do_on_panic(self.remove)
 
         if settings.basetgz:
             self.unpack_from_tgz(settings.basetgz)
@@ -605,9 +605,9 @@
             if not os.path.exists(self.relative("tmp/scripts/")):
                 os.mkdir(dest)
             logging.debug("Copying scriptsdir to %s" % dest)
-            for file in os.listdir(settings.scriptsdir):
-                if (file.startswith("post_") or file.startswith("pre_")) and os.path.isfile(os.path.join((settings.scriptsdir), file)):
-                    shutil.copy(os.path.join((settings.scriptsdir), file), dest) 
+            for sfile in os.listdir(settings.scriptsdir):
+                if (sfile.startswith("post_") or sfile.startswith("pre_")) and os.path.isfile(os.path.join((settings.scriptsdir), sfile)):
+                    shutil.copy(os.path.join((settings.scriptsdir), sfile), dest) 
 
         # Run custom scripts after creating the chroot.
         if settings.scriptsdir is not None: 
@@ -616,7 +616,7 @@
         if settings.savetgz:
             self.pack_into_tgz(settings.savetgz)
 
-        dont_do_on_panic(id)
+        dont_do_on_panic(cid)
 
     def remove(self):
         """Remove a chroot and all its contents."""
@@ -831,11 +831,11 @@
     def get_selections(self):
         """Get current package selections in a chroot."""
         (status, output) = self.run(["dpkg", "--get-selections", "*"])
-        list = [line.split() for line in output.split("\n") if line.strip()]
-        dict = {}
-        for name, status in list:
-            dict[name] = status
-        return dict
+        vlist = [line.split() for line in output.split("\n") if line.strip()]
+        vdict = {}
+        for name, status in vlist:
+            vdict[name] = status
+        return vdict
 
     def remove_or_purge(self, operation, packages):
         """Remove or purge packages in a chroot."""
@@ -914,7 +914,7 @@
     def save_meta_data(self):
         """Return the filesystem meta data for all objects in the chroot."""
         root = os.path.join(self.name, ".")
-        dict = {}
+        vdict = {}
         proc = os.path.join(root, "proc")
         for dirpath, dirnames, filenames in os.walk(root):
             assert dirpath[:len(root)] == root
@@ -927,8 +927,8 @@
                     target = os.readlink(name)
                 else:
                     target = None
-                dict[name[len(root):]] = (st, target)
-        return dict
+                vdict[name[len(root):]] = (st, target)
+        return vdict
 
     def relative(self, pathname):
         if pathname.startswith('/'):
@@ -937,20 +937,20 @@
 
     def get_files_owned_by_packages(self):
         """Return dict[filename] = [packagenamelist]."""
-        dir = self.relative("var/lib/dpkg/info")
-        dict = {}
-        for basename in os.listdir(dir):
+        vdir = self.relative("var/lib/dpkg/info")
+        vdict = {}
+        for basename in os.listdir(vdir):
             if basename.endswith(".list"):
                 pkg = basename[:-len(".list")]
-                f = file(os.path.join(dir, basename), "r")
+                f = file(os.path.join(vdir, basename), "r")
                 for line in f:
                     pathname = line.strip()
-                    if pathname in dict:
-                        dict[pathname].append(pkg)
+                    if pathname in vdict:
+                        vdict[pathname].append(pkg)
                     else:
-                        dict[pathname] = [pkg]
+                        vdict[pathname] = [pkg]
                 f.close()
-        return dict
+        return vdict
 
     def install_packages_by_name(self, packages):
         if packages:
@@ -963,8 +963,7 @@
                 self.list_installed_files (pre_info, self.save_meta_data())
             else:
                 self.run(["apt-get", "-y", "install"] + packages)
-	    
-	    
+
     def check_for_no_processes(self):
         """Check there are no processes running inside the chroot."""
         (status, output) = run(["lsof", "-w", "+D", self.name], ignore_errors=True)
@@ -1040,16 +1039,16 @@
         """Check if the packages have cron files under /etc/cron.d and in case positive, 
         it returns the list of files. """
 
-        dir = self.relative("var/lib/dpkg/info")
-        list = []
+        vdir = self.relative("var/lib/dpkg/info")
+        vlist = []
         has_cronfiles  = False
         for p in packages:
             basename = p + ".list"
 
-	    if not os.path.exists(os.path.join(dir,basename)):
+	    if not os.path.exists(os.path.join(vdir,basename)):
                 continue
 
-            f = file(os.path.join(dir,basename), "r")
+            f = file(os.path.join(vdir,basename), "r")
             for line in f:
                 pathname = line.strip()
                 if pathname.startswith("/etc/cron."):
@@ -1060,25 +1059,25 @@
                         if (mode & stat.S_IEXEC): 
                             if not has_cronfiles:
                                 has_cronfiles = True
-                            list.append(pathname)
+                            vlist.append(pathname)
                             logging.info("Package " + p + " contains cron file: " + pathname)
             f.close()
 
-        return has_cronfiles, list
+        return has_cronfiles, vlist
 
     def check_output_cronfiles (self, list):
         """Check if a given list of cronfiles has any output. Executes 
 	cron file as cron would do (except for SHELL)"""
         failed = False
-        for file in list:
+        for vfile in list:
 
-            if not os.path.exists(self.relative(file.strip("/"))):
+            if not os.path.exists(self.relative(vfile.strip("/"))):
                 continue 
 
-            (retval, output) = self.run([file])
+            (retval, output) = self.run([vfile])
             if output:
                 failed = True
-                logging.error("FAIL: Cron file %s has output with package removed" % file)
+                logging.error("FAIL: Cron file %s has output with package removed" % vfile)
 
         if failed:
             panic()
@@ -1087,27 +1086,27 @@
         """Check if the packages have logrotate files under /etc/logrotate.d and in case positive, 
         it returns the list of files. """
 
-        dir = self.relative("var/lib/dpkg/info")
-        list = []
+        vdir = self.relative("var/lib/dpkg/info")
+        vlist = []
         has_logrotatefiles  = False
         for p in packages:
             basename = p + ".list"
 
-	    if not os.path.exists(os.path.join(dir,basename)):
+	    if not os.path.exists(os.path.join(vdir,basename)):
                 continue
 
-            f = file(os.path.join(dir,basename), "r")
+            f = file(os.path.join(vdir,basename), "r")
             for line in f:
                 pathname = line.strip()
                 if pathname.startswith("/etc/logrotate.d/"):
                     if os.path.isfile(self.relative(pathname.strip("/"))):
                         if not has_logrotatefiles:
                             has_logrotatefiles = True
-                        list.append(pathname)
+                        vlist.append(pathname)
                         logging.info("Package " + p + " contains logrotate file: " + pathname)
             f.close()
 
-        return has_logrotatefiles, list
+        return has_logrotatefiles, vlist
 
     def check_output_logrotatefiles (self, list):
         """Check if a given list of logrotatefiles has any output. Executes 
@@ -1116,12 +1115,12 @@
         # XXX That's a crude hack (to fix #602409). Can't we define a set of needed packages differently?
         #     It also introduces the need for hack to fix #602409 in piuparts.py
         (a,b) = self.run(['apt-get','install', '-y', 'logrotate'])
-        for file in list:
+        for vfile in list:
 
-            if not os.path.exists(self.relative(file.strip("/"))):
+            if not os.path.exists(self.relative(vfile.strip("/"))):
                 continue 
 
-            (retval, output) = self.run(['/usr/sbin/logrotate', file])
+            (retval, output) = self.run(['/usr/sbin/logrotate', vfile])
             if output or retval != 0:
                 failed = True
                 logging.error("FAIL: Logrotate file %s exits with error or has output with package removed" % file)
@@ -1139,9 +1138,9 @@
             panic()
         list_scripts = os.listdir(basepath)
         list_scripts.sort()
-        for file in list_scripts:
-            if file.startswith(step):
-                script = os.path.join("tmp/scripts", file)
+        for vfile in list_scripts:
+            if vfile.startswith(step):
+                script = os.path.join("tmp/scripts", vfile)
                 self.run([script]) 
 
 
@@ -1321,7 +1320,7 @@
             'p': stat.S_IFIFO,
         }
 
-        dict = {}
+        vdict = {}
 
         tf = self._execute_getoutput(['find','/','-xdev','-printf',
                 "%y %m %U %G %s %p %l \\n".replace(' ','\\0')])
@@ -1348,13 +1347,13 @@
                 st.st_mode = mode_map[splut[0]] | int(splut[1],8)
                 (st.st_uid, st.st_gid, st.st_size) = map(int, splut[2:5])
 
-                dict[splut[5]] = (st, splut[6])
+                vdict[splut[5]] = (st, splut[6])
 
             f.close()
         finally:
             os.remove(tf)
 
-        return dict     
+        return vdict     
 
     def get_files_owned_by_packages(self):
         tf = self._execute_getoutput(['bash','-ec','''
@@ -1363,22 +1362,22 @@
                     xargs -r0 egrep . /dev/null
                 test "${PIPESTATUS[*]}" = "0 0"
             '''])
-        dict = {}
+        vdict = {}
         try:
             f = file(tf)
             for l in f:
                 (lf,pathname) = l.rstrip('\n').split(':',1)
                 assert lf.endswith('.list')
                 pkg = lf[:-5]
-                if pathname in dict:
-                    dict[pathname].append(pkg)
+                if pathname in vdict:
+                    vdict[pathname].append(pkg)
                 else:
-                    dict[pathname] = [pkg]
+                    vdict[pathname] = [pkg]
 
             f.close()
         finally:
             os.remove(tf)
-        return dict
+        return vdict
 
     def check_for_broken_symlinks(self):
         if not settings.check_broken_symlinks:
@@ -1486,15 +1485,15 @@
     """Return list of indented filenames."""
     meta_infos = meta_infos[:]
     meta_infos.sort()
-    list = []
+    vlist = []
     for name, data in meta_infos:
-        list.append("  %s\t" % name)
+        vlist.append("  %s\t" % name)
         if name in file_owners:
-            list.append(" owned by: %s\n" % ", ".join(file_owners[name]))
+            vlist.append(" owned by: %s\n" % ", ".join(file_owners[name]))
 	else:
-            list.append(" not owned\n")	
+            vlist.append(" not owned\n")	
 
-    return "".join(list)
+    return "".join(vlist)
 
 
 def offending_packages(meta_infos, file_owners):
@@ -1512,10 +1511,10 @@
     list of removed elements.
     """
     warn = []
-    for file in depsfiles:
-        if file in files:
-            files.remove(file)
-            warn.append(file)
+    for vfile in depsfiles:
+        if vfile in files:
+            files.remove(vfile)
+            warn.append(vfile)
     return warn
 
 
@@ -1537,13 +1536,13 @@
 
 def get_package_names_from_package_files(filenames):
     """Return list of package names given list of package file names."""
-    list = []
+    vlist = []
     for filename in filenames:
         (status, output) = run(["dpkg", "--info", filename])
         for line in [line.lstrip() for line in output.split("\n")]:
             if line[:len("Package:")] == "Package:":
-                list.append(line.split(":", 1)[1].strip())
-    return list
+                vlist.append(line.split(":", 1)[1].strip())
+    return vlist
 
 # Method to process a changes file, returning a list of all the .deb packages
 # from the 'Files' stanza.
@@ -1796,7 +1795,7 @@
 
     chroot = get_chroot()
     chroot.create()
-    id = do_on_panic(chroot.remove)
+    cid = do_on_panic(chroot.remove)
 
     if settings.basetgz:
         root_tgz = settings.basetgz
@@ -1820,10 +1819,10 @@
             save_meta_data(settings.save_end_meta, root_info, selections)
     
         chroot.remove()
-        dont_do_on_panic(id)
+        dont_do_on_panic(cid)
         chroot = get_chroot()
         chroot.create()
-        id = do_on_panic(chroot.remove)
+        cid = do_on_panic(chroot.remove)
 
     # leave indication in logfile why we do what we do
     logging.info("Notice: package selections and meta data from target disto saved, now starting over from source distro. See the description of --save-end-meta and --end-meta to learn why this is neccessary and how to possibly avoid it.")
@@ -1859,7 +1858,7 @@
     if root_tgz != settings.basetgz:
         remove_files([root_tgz])
     chroot.remove()
-    dont_do_on_panic(id)
+    dont_do_on_panic(cid)
 
     return result
 
@@ -2148,7 +2147,7 @@
     else:
         setup_logging(DUMP, log_file_name)
 
-    exit = None
+    exitcode = None
 
     if not settings.tmpdir:
         if "TMPDIR" in os.environ:
@@ -2175,16 +2174,16 @@
        (not settings.basetgz or len(settings.debian_distros) > 1):
         logging.error("--keep-sources-list only makes sense with --basetgz "
                       "and only one distribution")
-        exit = 1
+        exitcode = 1
 
     if not args:
         logging.error("Need command line arguments: " +
                       "names of packages or package files")
-        exit = 1
+        exitcode = 1
     settings.testobjects = args
 
-    if exit is not None:
-        sys.exit(exit)
+    if exitcode is not None:
+        sys.exit(exitcode)
 
     return args
     
@@ -2205,7 +2204,7 @@
     if len(settings.debian_distros) == 1:
         chroot = get_chroot()
         chroot.create()
-        id = do_on_panic(chroot.remove)
+        cid = do_on_panic(chroot.remove)
 
         root_info = chroot.save_meta_data()
         selections = chroot.get_selections()
@@ -2229,7 +2228,7 @@
                 panic()
     
         chroot.remove()
-        dont_do_on_panic(id)
+        dont_do_on_panic(cid)
     else:
         if install_and_upgrade_between_distros(package_list, packages):
             logging.info("PASS: Upgrading between Debian distributions.")
diff --git a/piupartslib/__init__.py b/piupartslib/__init__.py
--- a/piupartslib/__init__.py
+++ b/piupartslib/__init__.py
@@ -30,12 +30,12 @@
     assert url.endswith(".bz2")
     socket = urllib.urlopen(url)
     decompressor = bz2.BZ2Decompressor()
-    file = cStringIO.StringIO()
+    bzfile = cStringIO.StringIO()
     while True:
         data = socket.read(1024)
         if not data:
             break
-        file.write(decompressor.decompress(data))
+        bzfile.write(decompressor.decompress(data))
     socket.close()
-    file.seek(0)
-    return file
+    bzfile.seek(0)
+    return bzfile
diff --git a/piupartslib/dependencyparser.py b/piupartslib/dependencyparser.py
--- a/piupartslib/dependencyparser.py
+++ b/piupartslib/dependencyparser.py
@@ -205,28 +205,28 @@
         return self._list
 
     def _parse_dependencies(self):
-        list = []
+        vlist = []
         dep = self._parse_dependency()
         while dep:
-            list.append(dep)
+            vlist.append(dep)
             self._cursor.skip_whitespace()
             if self._cursor.at_end():
                 break
             if not self._cursor.match_literal(","):
                 raise DependencySyntaxError("Expected comma", self._cursor)
             dep = self._parse_dependency()
-        return list
+        return vlist
 
     def _parse_dependency(self):
-        list = []
+        vlist = []
         dep = self._parse_possible_dependency()
         while dep:
-            list.append(dep)
+            vlist.append(dep)
             self._cursor.skip_whitespace()
             if not self._cursor.match_literal("|"):
                 break
             dep = self._parse_possible_dependency()
-        return list
+        return vlist
         
     def _parse_possible_dependency(self):
         name = self._parse_package_name()
@@ -291,7 +291,7 @@
         if self._cursor.get_char() == "[":
             self._cursor.next()
             
-            list = []
+            vlist = []
             while True:
                 self._cursor.skip_whitespace()
                 if self._cursor.get_char() == "]":
@@ -301,8 +301,8 @@
                 if not m:
                     raise DependencySyntaxError("Expected architecture name",
                                                 self._cursor)
-                list.append(m.group())
+                vlist.append(m.group())
                     
-            return list
+            return vlist
         else:
             return None
diff --git a/piupartslib/packagesdb.py b/piupartslib/packagesdb.py
--- a/piupartslib/packagesdb.py
+++ b/piupartslib/packagesdb.py
@@ -105,18 +105,18 @@
         return depends
 
     def dependencies(self):
-        list = []
+        vlist = []
         for header in ["Depends", "Pre-Depends"]:
             if header in self:
-                list += self._parse_dependencies(header)
-        return list
+                vlist += self._parse_dependencies(header)
+        return vlist
 
     def provides(self):
-        list = []
+        vlist = []
         for header in ["Provides"]:
             if header in self:
-                list += self._parse_dependencies(header)
-        return list
+                vlist += self._parse_dependencies(header)
+        return vlist
 
     def is_testable(self):
         """Are we testable at all? Required aren't."""
@@ -258,25 +258,25 @@
     def set_subdirs(self, ok=None, fail=None, evil=None, reserved=None, morefail=None):
         # Prefix all the subdirs with the prefix
         if self.prefix:
-            format = self.prefix + "/%s"
+            pformat = self.prefix + "/%s"
         else:
-            format = "%s"
+            pformat = "%s"
         if ok:
-            self._ok = format % ok
+            self._ok = pformat % ok
         if fail:
-            self._fail = format % fail
+            self._fail = pformat % fail
         if evil:
-            self._evil = format % evil
+            self._evil = pformat % evil
         if reserved:
-            self._reserved = format % reserved
+            self._reserved = pformat % reserved
         if morefail:
-            self._morefail = [format % s for s in morefail]
+            self._morefail = [pformat % s for s in morefail]
         self._all = [self._ok, self._fail, self._evil, self._reserved] + self._morefail
            
     def create_subdirs(self):
-        for dir in self._all:
-            if not os.path.exists(dir):
-                os.makedirs(dir)
+        for sdir in self._all:
+            if not os.path.exists(sdir):
+                os.makedirs(sdir)
         
     def read_packages_file(self, input):
         self._packages_files.append(PackagesFile(input))
@@ -455,9 +455,9 @@
         return self.get_packages_in_state("waiting-to-be-tested")
 
     def reserve_package(self):
-        list = self._find_packages_ready_for_testing()
-        random.shuffle(list)
-        for p in list:
+        plist = self._find_packages_ready_for_testing()
+        random.shuffle(plist)
+        for p in plist:
             if self._logdb.create(self._reserved, p["Package"],
                                   p["Version"], ""):
                 return p
diff --git a/unittests.py b/unittests.py
--- a/unittests.py
+++ b/unittests.py
@@ -63,18 +63,18 @@
         return os.path.dirname(pathname), os.path.basename(pathname)
     
     def exists(self, pathname):
-        dir, base = self._parse(pathname)
-        return base in self.dict[dir]
+        vdir, base = self._parse(pathname)
+        return base in self.dict[vdir]
     
     def open_file(self, pathname, mode):
-        dir, base = self._parse(pathname)
-        self.dict[dir].append(base)
+        vdir, base = self._parse(pathname)
+        self.dict[vdir].append(base)
         return StringIO.StringIO()
         
     def remove_file(self, pathname):
-        dir, base = self._parse(pathname)
-        if base in self.dict[dir]:
-            del self.dict[dir]
+        vdir, base = self._parse(pathname)
+        if base in self.dict[vdir]:
+            del self.dict[vdir]
 
     def create(self, subdir, package, version, contents):
         return True
