        # the socket. we want to reconnect when somebody tries to send again.
        #
        # NOTE: we DO propagate the error, though, because we cannot simply
        # ignore the error... the caller will know if they can retry.
        if self.debuglevel > 0:
            print("send:", repr(str))
        try:
            blocksize = 8192
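            # `str' may be plain data or a file-like object; if it exposes a
            # read() method, it is streamed to the socket in blocksize chunks,
            # otherwise it falls through to the else branch below.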
            read = getattr(str, 'read', None)
            if read is not None:
                if self.debuglevel > 0:
                    print("sending a read()able")
                data = read(blocksize)
                while data:
                    self.sock.sendall(data)
                    data = read(blocksize)
            else:

    keepalive_handler = HTTPHandler()
    opener = urllib2.build_opener(keepalive_handler)
    urllib2.install_opener(opener)
    pos = {0: 'off', 1: 'on'}
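    # run the request twice: once with fancy error handling off and once with
    # it on, toggling the module-level HANDLE_ERRORS flag each time.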
    for i in (0, 1):
        print(" fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i))
        HANDLE_ERRORS = i
        try:
            fo = urllib2.urlopen(url)
            fo.read()
            fo.close()
            try:
                status, reason = fo.status, fo.reason
            except AttributeError:
                status, reason = None, None
        except IOError as e:
            print(" EXCEPTION: %s" % e)
            raise
        else:
            print(" status = %s, reason = %s" % (status, reason))
    HANDLE_ERRORS = orig
    hosts = keepalive_handler.open_connections()
    print("open connections:", hosts)
    keepalive_handler.close_all()

def continuity(url):
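    """Fetch `url' with the normal urllib2 handler and then with the
    keepalive handler (once via read() and once via readline()), printing
    the md5 digest of each download so the results can be checked for
    corruption."""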
    from . import util
    md5 = util.md5

    urllib2.install_opener(opener)
    fo = urllib2.urlopen(url)
    foo = fo.read()
    fo.close()
    m = md5(foo)
    print(format % ('normal urllib', m.hexdigest()))

    # now install the keepalive handler and try again
    opener = urllib2.build_opener(HTTPHandler())
    urllib2.install_opener(opener)

    fo = urllib2.urlopen(url)
    foo = fo.read()
    fo.close()
    m = md5(foo)
    print(format % ('keepalive read', m.hexdigest()))

    fo = urllib2.urlopen(url)
    foo = ''
    while True:
        f = fo.readline()
        if f:
            foo = foo + f
        else:
            break
    fo.close()
    m = md5(foo)
    print(format % ('keepalive readline', m.hexdigest()))

def comp(N, url):
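    """Fetch `url' N times with the stock urllib2 opener and N times with the
    keepalive handler installed, then report both times and the speedup."""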
    print(' making %i connections to:\n %s' % (N, url))

    sys.stdout.write(' first using the normal urllib handlers')
    # first use normal opener
    opener = urllib2.build_opener()
    urllib2.install_opener(opener)
    t1 = fetch(N, url)
    print(' TIME: %.3f s' % t1)

    sys.stdout.write(' now using the keepalive handler ')
    # now install the keepalive handler and try again
    opener = urllib2.build_opener(HTTPHandler())
    urllib2.install_opener(opener)
    t2 = fetch(N, url)
    print(' TIME: %.3f s' % t2)
    print(' improvement factor: %.2f' % (t1 / t2))

def fetch(N, url, delay=0):
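    """Fetch `url' N times, warn if the downloads come back with
    inconsistent lengths, and return the total elapsed time."""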
    import time
    lens = []
    starttime = time.time()

    j = 0
    for i in lens[1:]:
        j = j + 1
        if not i == lens[0]:
            print("WARNING: inconsistent length on read %i: %i" % (j, i))

    return diff

def test_timeout(url):
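    """Fetch `url', wait long enough for the server to close the keepalive
    connection, fetch it again, and check that both downloads are identical.
    DEBUG is temporarily replaced so handler chatter gets printed."""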
    global DEBUG
    dbbackup = DEBUG
    class FakeLogger(object):
        def debug(self, msg, *args):
            print(msg % args)
        info = warning = error = debug
    DEBUG = FakeLogger()
    print(" fetching the file to establish a connection")
    fo = urllib2.urlopen(url)
    data1 = fo.read()
    fo.close()

    i = 20
    print(" waiting %i seconds for the server to close the connection" % i)
    while i > 0:
        sys.stdout.write('\r %2i' % i)
        sys.stdout.flush()
        time.sleep(1)
        i -= 1
    sys.stderr.write('\r')

    print(" fetching the file a second time")
    fo = urllib2.urlopen(url)
    data2 = fo.read()
    fo.close()

    if data1 == data2:
        print(' data are identical')
    else:
        print(' ERROR: DATA DIFFER')

    DEBUG = dbbackup


def test(url, N=10):
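    """Run the whole suite against `url': error handling, continuity, the
    speed comparison over N connections, and the dropped-connection check."""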
    print("checking error handler (do this on a non-200)")
    try:
        error_handler(url)
    except IOError:
        print("exiting - exception will prevent further tests")
        sys.exit()
    print('')
    print("performing continuity test (making sure stuff isn't corrupted)")
    continuity(url)
    print('')
    print("performing speed comparison")
    comp(N, url)
    print('')
    print("performing dropped-connection check")
    test_timeout(url)

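# Command-line entry point: run this module as a script with an integer
# request count and a URL (see the usage message below) to exercise the
# tests above.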
if __name__ == '__main__':
    import time
    try:
        N = int(sys.argv[1])
        url = sys.argv[2]
    except (IndexError, ValueError):
        print("%s <integer> <url>" % sys.argv[0])
    else:
        test(url, N)