multithreading - Exiting a threaded application with a signal interrupt in Python
I am on OS X, trying to exit the program by pressing Ctrl+C.
But it seems that even with a signal handler registered in the main thread, the program doesn't exit on Ctrl+C while the worker threads are executing.
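To narrow it down, here is a stripped-down sketch that reproduces the hang for me (my own reduction, not the full program below): the handler is registered in the main thread, but Ctrl+C does nothing while the main thread is blocked in queue.join().

import Queue
import signal
import sys
import threading
import time

def do_exit(signum, stack):
    sys.stderr.write("Received signal %d\n" % signum)
    raise SystemExit("Exiting")

def worker(q):
    while True:
        q.get()
        time.sleep(30)  # stand-in for a long download
        q.task_done()

q = Queue.Queue()
signal.signal(signal.SIGINT, do_exit)  # registered in the main thread
t = threading.Thread(target=worker, args=(q,))
t.setDaemon(True)
t.start()
q.put("job")
q.join()  # Ctrl+C is swallowed while blocked here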
Here is a piece of code that tries to download 3 MP3 files from the net, each in a separate thread.
import Queue
import urllib2
import os
import signal
import sys
import time
import threading
from socket import error as _SocketError

urls = ["http://broadcast.lds.org/churchmusic/mp3/1/2/nowords/271.mp3",
        "http://s1.fans.ge/mp3/201109/08/john_legend_so_high_remix(fans_ge).mp3",
        "http://megaboon.com/common/preview/track/786203.mp3"]

queue = Queue.Queue()

def do_exit(signum, stack):
    # handle the Unix signal received and exit
    sys.stderr.write("Received signal %d " % (signum))
    raise SystemExit("Exiting")

class ThreadedFetch(threading.Thread):
    """ docstring for ThreadedFetch """
    def __init__(self, queue, count=1):
        super(ThreadedFetch, self).__init__()
        self.queue = queue

    def run(self):
        while True:
            # grabs the url of the link and the path to save to
            host = self.queue.get()
            # submit the url for download and the location to save to.
            self._downloadFile(host[0], host[1])

    def _downloadFile(self, url, saveTo=None):
        file_name = url.split('/')[-1]
        self.setName("Parent_%s_thread" % file_name.split(".")[0])
        if not saveTo:
            saveTo = '/Users/sanjeevkumar/Desktop'
        try:
            u = urllib2.urlopen(url)
        except urllib2.URLError, er:
            print("%s for %s failed to download." % (er.reason, file_name))
            self.queue.task_done()
            print "Exiting: %s" % self.getName()
        except _SocketError, err:
            print("%s \n %s failed to download." % (err, file_name))
            self.queue.task_done()
        else:
            th = threading.Thread(
                target=self._fileWriteToDisk,
                args=(saveTo, u, file_name),
                name="fileWrite_Child_of_%s" % self.getName(),
            )
            # if the user clicks close while the thread is still running,
            # the program will wait till the save is done,
            # then close.
            th.daemon = False
            th.start()
            time.sleep(0.1)
            print "Writing to disk using child: %s " % th.name

    def _fileWriteToDisk(self, saveTo, urlObject, file_name):
        path = os.path.join(saveTo, file_name)
        try:
            f = open(path, 'wb')
        except IOError, er:
            self.queue.task_done()
            print er
            return
        meta = urlObject.info()
        file_size = int(meta.getheaders("Content-Length")[0])
        print "Downloading: %s : %s " % (file_name, file_size)
        file_size_dl = 0
        block_sz = 8192
        while True:
            buffer = urlObject.read(block_sz)
            if not buffer:
                break
            file_size_dl += len(buffer)
            f.write(buffer)
            status = r"%10d [%3.2f%%]" % (file_size_dl, file_size_dl * 100. / file_size)
            status = status + chr(8)*(len(status)+1)
            sys.stdout.write('%s\r' % status)
            time.sleep(.05)
            sys.stdout.flush()
        if file_size_dl == file_size:
            print r"Download completed %s%% for file %s, saved to %s" % (file_size_dl * 100. / file_size, file_name, saveTo)
        f.close()
        # signals to the queue that the job is done
        self.queue.task_done()

def main():
    # register the signal in the main thread
    signal.signal(signal.SIGINT, do_exit)
    try:
        # spawn a pool of threads, and pass them the queue instance
        for i in range(len(urls)):
            t = ThreadedFetch(queue)
            t.setDaemon(True)
            time.sleep(0.1)
            t.start()
        urls_saveTo = {urls[0]: None, urls[1]: None, urls[2]: None}
        # populate the queue with data
        for item, value in urls_saveTo.iteritems():
            queue.put([item, value])
        # wait on the queue until everything has been processed
        queue.join()
        print '*** Done'
    except (KeyboardInterrupt, SystemExit):
        print '\n! Received keyboard interrupt, quitting threads.\n'

if __name__ == "__main__":
    main()

Feel free to point out anything else that can be improved strategically. Any help is appreciated.
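For what it is worth, the only workaround I have found so far is to stop the main thread from blocking forever in queue.join() and poll instead, so it keeps getting a chance to run the Python-level signal handler. Below is a sketch of a drop-in replacement for main() above; it reuses the imports, urls, queue and ThreadedFetch from the listing, and it peeks at queue.unfinished_tasks, which as far as I can tell is a CPython implementation detail rather than documented API.

def main():
    # register the signal in the main thread
    signal.signal(signal.SIGINT, do_exit)
    for i in range(len(urls)):
        t = ThreadedFetch(queue)
        t.setDaemon(True)
        t.start()
    for url in urls:
        queue.put([url, None])
    try:
        # Poll with short sleeps instead of queue.join(); in Python 2,
        # join() blocks in an uninterruptible lock acquire, whereas
        # time.sleep() in the main thread can be interrupted by SIGINT.
        while queue.unfinished_tasks:
            time.sleep(0.1)
        print '*** Done'
    except (KeyboardInterrupt, SystemExit):
        print '\n! Received keyboard interrupt, quitting threads.\n'

Note that the non-daemon file-writing child threads (th.daemon = False) will still keep the process alive until they finish writing, even after SystemExit is raised in the main thread.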
python multithreading signals