Category: Python/Ruby

2016-05-05 11:02:40

1. This code comes from:


import threading
import urllib.request
import time
import functools

UPDATE_INTERVAL = 0.01

class URLThread(threading.Thread):
    def __init__(self, url):
        super(URLThread, self).__init__()
        self.url = url
        self.response = "No Content"

    def run(self):
        headers = {'User-Agent': "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:26.0) Gecko/20100101 Firefox/26.0"}
        # Build a Request carrying the custom User-Agent and actually use it;
        # the original passed self.url to urlopen, so the headers were ignored.
        request = urllib.request.Request(self.url, headers=headers)
        self.request = urllib.request.urlopen(request).read()
        self.response = self.request.decode()

def multi_get(uries, timeout=2.0):
    def alive_count(lst):
        # Count the threads that are still running.
        alive = map(lambda x: 1 if x.is_alive() else 0, lst)
        return functools.reduce(lambda a, b: a + b, alive)

    threads = [URLThread(uri) for uri in uries]
    for thread in threads:
        thread.start()

    # Poll until every thread has finished or the overall timeout expires.
    while alive_count(threads) > 0 and timeout > 0.0:
        timeout = timeout - UPDATE_INTERVAL
        time.sleep(UPDATE_INTERVAL)
    return [(x.url, x.response) for x in threads]

def run_multi_get():
    sites = ["", "", "", ""]
    requests = multi_get(sites, timeout=2.5)
    with open("site", "w") as ofile:
        for request in requests:
            output = ":".join(request)
            ofile.write(output + "\n")


run_multi_get()

The interesting function in this code is alive_count: a neat trick that returns the number of threads that are currently alive (a simpler variant is sketched right after the snippet).

    def alive_count(lst):
        alive = map(lambda x: 1 if x.is_alive() else 0, lst)
        return functools.reduce(lambda a, b: a + b, alive)
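
A simpler way to get the same count, assuming the same list of Thread objects, is to let sum() do the tallying; is_alive() returns a boolean, so a generator expression is enough:

def alive_count(lst):
    # Each thread that is still running contributes 1 to the sum.
    return sum(1 for t in lst if t.is_alive())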

If I were writing it, I would simply do it this way:


def multi_get(uries):
    threads = [URLThread(uri) for uri in uries]

    for thread in threads:
        thread.start()

    for thread in threads:
        thread.join()

    return [(x.url, x.response) for x in threads]
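
One thing this join-based version gives up is the overall timeout that the original multi_get enforced. If that matters, a minimal sketch that keeps a single deadline (reusing URLThread from above; the function name and the deadline arithmetic are my own additions) is to spread the remaining time across the individual join() calls:

import time

def multi_get_with_deadline(uries, timeout=2.0):
    threads = [URLThread(uri) for uri in uries]
    for thread in threads:
        thread.start()

    # One shared deadline instead of polling in UPDATE_INTERVAL steps.
    deadline = time.monotonic() + timeout
    for thread in threads:
        remaining = deadline - time.monotonic()
        if remaining > 0:
            thread.join(remaining)

    return [(x.url, x.response) for x in threads]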

2. This code comes from:
This script takes several directory names as input (fewer than 4); for each directory a worker thread counts the files underneath it. The script hardcodes a pool of 4 threads.


import os, time
import threading
import queue

class WorkerThread(threading.Thread):
    """ A worker thread that takes directory names from a queue, finds all
    files in them recursively and reports the result.
    Input is done by placing directory names (as strings) into the
    Queue passed in dir_q.
    Output is done by placing tuples into the Queue passed in result_q.
    Each tuple is (thread name, dirname, [list of files]).
    Ask the thread to stop by calling its join() method.
    """
    def __init__(self, dir_q, result_q):
        super(WorkerThread, self).__init__()
        self.dir_q = dir_q
        self.result_q = result_q
        self.stoprequest = threading.Event()

    def run(self):
        # As long as we weren't asked to stop, try to take new tasks from the
        # queue. The tasks are taken with a blocking 'get', so no CPU
        # cycles are wasted while waiting.
        # Also, 'get' is given a timeout, so stoprequest is always checked,
        # even if there's nothing in the queue.
        while not self.stoprequest.is_set():
            try:
                dirname = self.dir_q.get(True, 0.05)
                filenames = list(self.__files_in_dir(dirname))
                self.result_q.put((self.name, dirname, filenames))
            except queue.Empty:
                continue

    def join(self, timeout=None):
        self.stoprequest.set()
        super(WorkerThread, self).join(timeout)

    def __files_in_dir(self, dirname):
        # Given a directory name, yields the names of all files (not dirs)
        # contained in this directory and its sub-directories.
        for path, dirs, files in os.walk(dirname):
            for file in files:
                yield os.path.join(path, file)

def utilize_WorkerThread(args):
    # Create a single input and a single output queue shared by all threads.
    dir_q = queue.Queue()
    result_q = queue.Queue()

    # Create the thread pool.
    pool = [WorkerThread(dir_q=dir_q, result_q=result_q) for i in range(4)]

    # Start all threads.
    for thread in pool:
        thread.start()

    # Give the workers some work to do.
    work_count = 0
    for dir in args:
        if os.path.exists(dir):
            work_count += 1
            dir_q.put(dir)

    print("Assigned {} dirs to workers".format(work_count))

    # Now collect all results.
    while work_count > 0:
        # Blocking get from a Queue.
        result = result_q.get()
        print("From thread {0}: {1} files found in dir {2}".format(result[0], len(result[2]), result[1]))
        work_count -= 1

    # Ask the threads to die and wait for them to do it.
    for thread in pool:
        thread.join()


import sys
utilize_WorkerThread(sys.argv[1:])
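
For comparison, the same job can be expressed with concurrent.futures.ThreadPoolExecutor, which hides the queue plumbing; this is only a sketch under the assumption that a per-directory file count is all that is needed (count_files and utilize_executor are names I made up):

import os
import sys
from concurrent.futures import ThreadPoolExecutor

def count_files(dirname):
    # Walk the tree and count plain files, like __files_in_dir does.
    return sum(len(files) for _, _, files in os.walk(dirname))

def utilize_executor(args):
    dirs = [d for d in args if os.path.exists(d)]
    with ThreadPoolExecutor(max_workers=4) as pool:
        for dirname, count in zip(dirs, pool.map(count_files, dirs)):
            print("{0} files found in dir {1}".format(count, dirname))

utilize_executor(sys.argv[1:])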

3. A threaded ping example: one thread is started per IP address to ping it.


import os
import re
import threading
import logging

def ping_thread():
    class ip_check(threading.Thread):

        def __init__(self, ip):
            super(ip_check, self).__init__()
            self.ip = ip
            self.__successful_ping = -1

        def run(self):
            ping_out = os.popen("ping -q -c2 {}".format(self.ip))
            while True:
                line = ping_out.readline()
                if not line:
                    break
                # Extract the "N received" count from the ping summary line.
                n_received = re.findall(received_packages, line)
                if n_received:
                    self.__successful_ping = int(n_received[0])

        def status(self):
            if self.__successful_ping == 0:
                return "no response"
            elif self.__successful_ping == 1:
                return "alive, but 50% packet loss"
            elif self.__successful_ping == 2:
                return "alive"
            else:
                return "should not occur"

    received_packages = re.compile(r"(\d) received")

    check_results = []
    for suffix in range(100, 105):
        ip = "192.168.0." + str(suffix)
        current = ip_check(ip)
        check_results.append(current)
        current.start()

    for el in check_results:
        el.join()
        logging.debug("status from {0} is {1}".format(el.ip, el.status()))
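
As written, the snippet never configures logging and never calls ping_thread(), so it would print nothing; a minimal way to exercise it (assuming the imports added above) is:

import logging

# logging.debug() output is discarded unless the root logger is at DEBUG level.
logging.basicConfig(level=logging.DEBUG, format="%(message)s")

ping_thread()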
