WEB开发网
开发学院软件开发Python 使用 Python 进行线程编程 阅读

使用 Python 进行线程编程

 2008-09-30 12:46:16 来源:WEB开发网   
核心提示: 多队列数据挖掘网站import Queueimport threadingimport urllib2import timefrom BeautifulSoup import BeautifulSouphosts = ["http://yahoo.com", "

多队列数据挖掘网站

import Queue
import threading
import urllib2
import time
from BeautifulSoup import BeautifulSoup
# Target sites whose front pages will be fetched and mined.
hosts = ["http://yahoo.com", "http://google.com", "http://amazon.com",
    "http://ibm.com", "http://apple.com"]
# Work queue of host URLs waiting to be fetched by ThreadUrl workers.
queue = Queue.Queue()
# Queue of fetched page bodies waiting to be parsed by DatamineThread workers.
out_queue = Queue.Queue()
class ThreadUrl(threading.Thread):
  """Fetch pages for host URLs pulled from `queue` and push the raw
  HTML onto `out_queue`.

  Intended to run as a daemon thread: run() loops forever, and every
  dequeued item is acknowledged with task_done() so queue.join() can
  unblock.
  """
  def __init__(self, queue, out_queue):
    threading.Thread.__init__(self)
    self.queue = queue          # input: host URLs to fetch
    self.out_queue = out_queue  # output: raw page bodies

  def run(self):
    while True:
      # Blocks until a host URL is available.
      host = self.queue.get()
      try:
        # Fetch the page body; explicitly close the response so the
        # underlying socket is not leaked.
        response = urllib2.urlopen(host)
        try:
          chunk = response.read()
        finally:
          response.close()
        # Hand the raw HTML to the parser threads.
        self.out_queue.put(chunk)
      finally:
        # Always acknowledge the item, even when the fetch raises --
        # otherwise queue.join() would block forever.
        self.queue.task_done()
class DatamineThread(threading.Thread):
  """Parse HTML chunks pulled from `out_queue` and print their titles.

  Intended to run as a daemon thread: run() loops forever, and every
  dequeued chunk is acknowledged with task_done() so out_queue.join()
  can unblock.
  """
  def __init__(self, out_queue):
    threading.Thread.__init__(self)
    self.out_queue = out_queue  # input: raw page bodies to parse

  def run(self):
    while True:
      # Blocks until a page body is available.
      chunk = self.out_queue.get()
      try:
        # Extract and report the <title> tag(s).
        # Single-argument parenthesized print behaves identically on
        # Python 2 and 3.
        soup = BeautifulSoup(chunk)
        print(soup.findAll(['title']))
      finally:
        # Always acknowledge the item, even when parsing raises --
        # otherwise out_queue.join() would block forever.
        self.out_queue.task_done()
# Wall-clock reference point for the elapsed-time report printed at the end.
start = time.time()
def main(num_threads=5):
  """Spawn fetcher and parser thread pools, feed the host list, and
  block until every host has been fetched and every page parsed.

  Args:
    num_threads: size of each thread pool (fetchers and parsers);
      defaults to 5, matching the original hard-coded pool size.
  """
  # Pool of fetcher threads: read hosts from `queue`, write HTML to
  # `out_queue`.
  for _ in range(num_threads):
    t = ThreadUrl(queue, out_queue)
    # Daemon threads die with the main thread despite their infinite
    # run() loops.
    t.setDaemon(True)
    t.start()
  # Feed the work queue.
  for host in hosts:
    queue.put(host)
  # Pool of parser threads: read HTML chunks from `out_queue`.
  for _ in range(num_threads):
    dt = DatamineThread(out_queue)
    dt.setDaemon(True)
    dt.start()
  # Block until every host is fetched and every fetched page is parsed.
  queue.join()
  out_queue.join()
# Guard the entry point so importing this module does not trigger the
# network crawl as a side effect.
if __name__ == "__main__":
  main()
  # Single-argument parenthesized print behaves identically on Python 2
  # and 3.
  print("Elapsed Time: %s" % (time.time() - start))

上一页  1 2 3 4 5 6  下一页

Tags:使用 Python 进行

编辑录入:爽爽 [复制链接] [打 印]
赞助商链接