
10.2. Python Multithreading

10.2.1. threading: a higher-level threading interface

The threading module builds a higher-level threading interface on top of the lower-level thread module. The example below subclasses threading.Thread and overrides run() to define what each thread does.

import threading

class MyThread(threading.Thread):
    def __init__(self, name=None):
        threading.Thread.__init__(self)
        self.name = name

    def run(self):
        # run() is executed in the new thread once start() is called.
        print self.name

def test():
    # Create and start 100 threads, each with its own name.
    for i in range(0, 100):
        t = MyThread("thread_" + str(i))
        t.start()

if __name__ == '__main__':
    test()
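Subclassing threading.Thread is not required: a plain function can be handed to the Thread constructor through its target and args parameters. The sketch below is a minimal variant of the example above written that way (the worker function name is just for illustration); it also calls join() so the main thread waits for every worker to finish.

import threading

def worker(name):
    # Runs in its own thread and simply prints the name it was given.
    print name

def test():
    threads = []
    for i in range(0, 100):
        t = threading.Thread(target=worker, args=("thread_" + str(i),))
        t.start()
        threads.append(t)
    # Wait for all workers to finish before returning.
    for t in threads:
        t.join()

if __name__ == '__main__':
    test()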
		

10.2.2. Lock: thread locks

This example implements a counter: the global variable count is updated by several threads at the same time, so a thread lock is needed to make the increments happen one at a time and keep any update from being lost.

		
#-*- encoding: utf-8 -*-
import threading
import time

class Test(threading.Thread):
    def __init__(self, num):
        threading.Thread.__init__(self)
        self._run_num = num

    def run(self):
        global count, mutex
        threadname = threading.currentThread().getName()

        for x in range(int(self._run_num)):
            # Only one thread at a time may update the shared counter.
            mutex.acquire()
            count = count + 1
            mutex.release()
            print (threadname, x, count)
            time.sleep(1)

if __name__ == '__main__':
    threads = []
    num = 5
    count = 0
    # Create the lock
    mutex = threading.Lock()
    # Create the thread objects
    for x in range(num):
        threads.append(Test(10))
    # Start the threads
    for t in threads:
        t.start()
    # Wait for the child threads to finish
    for t in threads:
        t.join()
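Because threading.Lock supports the context-manager protocol, the acquire()/release() pair above can also be written as a with block, which releases the lock even if the critical section raises an exception. A minimal sketch of the same counter in that style (same Python 2 environment assumed; the increment function is illustrative):

import threading

count = 0
mutex = threading.Lock()

def increment(times):
    global count
    for _ in range(times):
        # Equivalent to mutex.acquire() / mutex.release(), but exception safe.
        with mutex:
            count = count + 1

threads = [threading.Thread(target=increment, args=(10000,)) for _ in range(5)]
for t in threads:
    t.start()
for t in threads:
    t.join()
# Always prints 50000, because the lock serializes the updates.
print count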
        
		

10.2.3. Queue: thread-safe queues

ref: http://www.ibm.com/developerworks/aix/library/au-threadingpython/

		
#!/usr/bin/env python
import Queue
import threading
import urllib2
import time

hosts = ["http://yahoo.com", "http://google.com", "http://amazon.com",
         "http://ibm.com", "http://apple.com"]

queue = Queue.Queue()

class ThreadUrl(threading.Thread):
    """Threaded Url Grab"""
    def __init__(self, queue):
        threading.Thread.__init__(self)
        self.queue = queue

    def run(self):
        while True:
            # grabs host from queue
            host = self.queue.get()

            # grabs urls of hosts and prints first 1024 bytes of page
            url = urllib2.urlopen(host)
            print url.read(1024)

            # signals to queue job is done
            self.queue.task_done()

start = time.time()

def main():
    # spawn a pool of threads, and pass them queue instance
    for i in range(5):
        t = ThreadUrl(queue)
        t.setDaemon(True)
        t.start()

    # populate queue with data
    for host in hosts:
        queue.put(host)

    # wait on the queue until everything has been processed
    queue.join()

main()
print "Elapsed Time: %s" % (time.time() - start)