Gib Bogle
The code below runs with Python 2.5.4, but gives the following error messages
with Python 2.6.1. What needs to be done to make it work? Thanks.
C:\Summer09\Tutorials>python url_queue.pyw
Traceback (most recent call last):
  File "url_queue.pyw", line 3, in <module>
    import threading
  File "C:\Summer09\Tutorials\threading.py", line 9, in <module>
    class ProcessingThread(threading.Thread, QtCore.QObject):
AttributeError: 'module' object has no attribute 'Thread'
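For reference, which threading module actually gets imported can be checked with a couple of lines like these (a diagnostic sketch only, run from the same Tutorials directory, not part of url_queue.pyw):

import threading
print threading.__file__    # path of the threading module Python found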
url_queue.py
------------
#!/usr/bin/env python

import Queue
import threading
import urllib2
import time

hosts = ["http://yahoo.com", "http://google.com", "http://amazon.com",
         "http://ibm.com", "http://apple.com"]

queue = Queue.Queue()

class ThreadUrl(threading.Thread):
    #"""Threaded Url Grab"""
    def __init__(self, queue, i):
        threading.Thread.__init__(self)
        self.queue = queue
        self.num = i
        print "Thread: ", self.num

    def run(self):
        while True:
            # grabs host from queue
            host = self.queue.get()
            print "num, host: ", self.num, host

            # grabs urls of hosts and prints first 1024 bytes of page
            url = urllib2.urlopen(host)
            print url.read(1024)

            # signals to queue job is done
            self.queue.task_done()

start = time.time()

def main():
    # spawn a pool of threads, and pass them queue instance
    for i in range(5):
        t = ThreadUrl(queue, i)
        t.setDaemon(True)
        t.start()

    # populate queue with data
    for host in hosts:
        queue.put(host)

    # wait on the queue until everything has been processed
    queue.join()

main()
print "Elapsed Time: %s" % (time.time() - start)