author     Joseph Hunkeler <jhunk@stsci.edu>    2013-02-07 16:12:01 -0500
committer  Joseph Hunkeler <jhunk@stsci.edu>    2013-02-07 16:12:01 -0500
commit     b9f8a1166b211954a82195f921b65a0185b63646 (patch)
tree       9437225ba0a38015b4a3c6466f6798af4d920c2f /benchy.py
parent     d0347a492d3e9ef2763e51c28cbe8ee235ec6158 (diff)
download   benchy-b9f8a1166b211954a82195f921b65a0185b63646.tar.gz
cant remember what i did but it was cool
Diffstat (limited to 'benchy.py')
-rwxr-xr-x  benchy.py  88
1 file changed, 57 insertions(+), 31 deletions(-)
diff --git a/benchy.py b/benchy.py
index cf109ab..84a9b5d 100755
--- a/benchy.py
+++ b/benchy.py
@@ -1,15 +1,16 @@
#!/usr/bin/env python
import os
+import sys
import time
import string
import StringIO
import tempfile
import numpy
+import argparse
from matplotlib import pyplot
class Statistics:
def __init__(self):
- print("Initializing %s" % (self.__class__))
self.results = {}
pass
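
The rest of the Statistics class is outside this hunk's context, but later hunks call Statistics.store(self, key, []) and read self.results and get_numpy(key). A sketch of the assumed interface (method bodies are not shown in this diff, so this is an inference, not the commit's code):

    import numpy

    class Statistics:
        def __init__(self):
            # maps a test name, e.g. 'write_buffered_sync', to a list of samples
            self.results = {}

        def store(self, key, value):
            # assumed: register a sample list under `key`
            self.results[key] = value

        def get_numpy(self, key):
            # assumed: stored samples are (rate, elapsed) pairs; split them into
            # two numpy arrays, matching `rate, elapsed = ...get_numpy(k)` below
            samples = numpy.array(self.results[key])
            return samples[:, 0], samples[:, 1]
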
@@ -59,9 +60,37 @@ class Stopwatch:
return (time.time() - self.start_time)
def result(self):
- return (self.stop_time - self.start_time)
+ return (self.stop_time - self.start_time)
+
+ def reset(self):
+ self.start_time = 0.0
+ self.stop_time = 0.0
-class Sequencial_test(Statistics):
+class Test_Controller:
+ def __init__(self):
+ print("Initializing %s" % (self.__class__))
+ self.timer = Stopwatch()
+ self.prefix='benchy_'
+ self.path_tests = os.path.join(os.curdir, 'tests')
+ self.path_results = os.path.join(os.curdir, 'results')
+ if not os.path.exists(self.path_tests):
+ os.mkdir(self.path_tests)
+ if not os.path.exists(self.path_results):
+ os.mkdir(self.path_results)
+
+
+class Tar_test(Test_Controller, Statistics):
+ def __init__(self, size=1):
+ Test_Controller.__init__(self)
+ Statistics.__init__(self)
+ Statistics.store(self, 'write_archive', [])
+ Statistics.store(self, 'read_archive', [])
+ self.MEGABYTE = 1024000
+ self.size = size
+
+ pass
+
+class Sequencial_test(Test_Controller, Statistics):
def __init__(self, size=1):
'''
size in megabytes
@@ -69,6 +98,7 @@ class Sequencial_test(Statistics):
mode is 'buffered' or 'unbuffered'
async is True or False
'''
+ Test_Controller.__init__(self)
Statistics.__init__(self)
Statistics.store(self, 'write_buffered_sync', [])
Statistics.store(self, 'write_buffered_async', [])
@@ -80,15 +110,7 @@ class Sequencial_test(Statistics):
Statistics.store(self, 'read_unbuffered_async', [])
self.MEGABYTE = 1024000
self.size = size
- self.timer = Stopwatch()
self.random_source = os.urandom(self.MEGABYTE * 10)
- self.prefix='sequencial_test_'
- self.path_tests = os.path.join(os.curdir, 'tests')
- self.path_results = os.path.join(os.curdir, 'results')
- if not os.path.exists(self.path_tests):
- os.mkdir(self.path_tests)
- if not os.path.exists(self.path_results):
- os.mkdir(self.path_results)
def __del__(self):
if os.path.exists(self.filename):
@@ -106,7 +128,7 @@ class Sequencial_test(Statistics):
def stress_buffered_sync(self, iter_max=1):
print("Synchronous buffered I/O")
for i in range(iter_max):
- print("\rTest: %d of %d" % (i+1, iter_max))
+ print("Test: %d of %d" % (i+1, iter_max))
self.write_buffered_sync()
self.read_buffered_sync()
self._cleanup()
@@ -114,7 +136,7 @@ class Sequencial_test(Statistics):
def stress_buffered_async(self, iter_max=1):
print("Asynchronous buffered I/O")
for i in range(iter_max):
- print("\rTest: %d of %d" % (i+1, iter_max))
+ print("Test: %d of %d" % (i+1, iter_max))
self.write_buffered_async()
self.read_buffered_async()
self._cleanup()
@@ -122,7 +144,8 @@ class Sequencial_test(Statistics):
def stress_unbuffered_sync(self, iter_max=1):
print("Synchronous unbuffered I/O")
for i in range(iter_max):
- print("\rTest: %d of %d\r" % (i+1, iter_max))
+ print("Test: %d of %d" % (i+1, iter_max))
+ sys.stdout.flush()
self.write_unbuffered_sync()
self.read_unbuffered_sync()
self._cleanup()
@@ -130,7 +153,7 @@ class Sequencial_test(Statistics):
def stress_unbuffered_async(self, iter_max=1):
print("Asynchronous unbuffered I/O")
for i in range(iter_max):
- print("\rTest: %d of %d\r" % (i+1, iter_max))
+ print("Test: %d of %d" % (i+1, iter_max))
self.write_unbuffered_async()
self.read_unbuffered_async()
self._cleanup()
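
The write_*/read_* helpers these loops call are not part of the diff context. Purely as an illustration of how one such pass might pair the Stopwatch with the stored results (Stopwatch.start()/stop() and the (rate, elapsed) sample shape are assumptions, not shown here), a buffered synchronous write could look like:

    def write_buffered_sync(self):
        # hypothetical sketch of one timed pass, not the code from this commit
        self.filename = os.path.join(self.path_tests, self.prefix + 'write_buffered_sync')
        self.timer.reset()
        self.timer.start()                      # assumed counterpart to result()/reset()
        with open(self.filename, 'wb') as fp:
            for _ in range(self.size):
                fp.write(self.random_source[:self.MEGABYTE])
            fp.flush()
            os.fsync(fp.fileno())               # "sync": force the data out of the page cache
        self.timer.stop()
        elapsed = self.timer.result()
        rate = self.size / elapsed              # MB/s, using the class's 1024000-byte megabyte
        self.results['write_buffered_sync'].append((rate, elapsed))
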
@@ -229,27 +252,28 @@ class Sequencial_test(Statistics):
return rate, elapsed
if __name__ == "__main__":
- size_steps = [ 1, 4, 8 ]
- buffer_steps = [ 512000, 1024000, 4096000 ]
+ parser = argparse.ArgumentParser(description='Benchy the Benchmarker')
+ parser.add_argument('-p', '--passes', default=10, action='store', type=int, required=False)
+ parser.add_argument('-s', '--steps', default=[1], action='append', type=int, required=False)
+ args = vars(parser.parse_args())
- iter_max=25
- for size in size_steps:
+ for size in args['steps']:
print("### %dMB ####" % (size))
sequence_test = Sequencial_test(size)
- sequence_test.stress_buffered_sync(iter_max)
- sequence_test.stress_buffered_async(iter_max)
- sequence_test.stress_unbuffered_sync(iter_max)
- sequence_test.stress_unbuffered_async(iter_max)
+ sequence_test.stress_buffered_sync(args['passes'])
+ sequence_test.stress_buffered_async(args['passes'])
+ sequence_test.stress_unbuffered_sync(args['passes'])
+ sequence_test.stress_unbuffered_async(args['passes'])
for k in sequence_test.results:
- print('Plotting %s' % (k))
+ print('%s' % (k))
rate, elapsed = sequence_test.get_numpy(k)
rate_mean = numpy.repeat(numpy.mean(rate), rate.size, 0)
#rate_average = numpy.repeat(numpy.average(rate), rate.size, 0)
elapsed_mean = numpy.repeat(numpy.mean(elapsed), elapsed.size, 0)
#elapsed_average = numpy.repeat(numpy.average(elapsed), elapsed.size, 0)
- print("Rate mean: %f" % (numpy.mean(rate_mean)))
- print("Elapsed mean: %f" % (numpy.mean(elapsed_mean)))
+ print("\tRate mean: %0.4f" % (numpy.mean(rate_mean)))
+ print("\tElapsed mean: %0.4f" % (numpy.mean(elapsed_mean)))
pyplot.figure(1)
pyplot.subplot(211)
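
rate_mean and elapsed_mean above are the scalar means repeated out to the series length so they draw as flat reference lines next to the per-iteration curves. A minimal equivalent, shown only for illustration (numpy.full is the more direct spelling of the same idea):

    import numpy

    rate = numpy.array([10.0, 12.0, 14.0])                 # e.g. per-iteration MB/s samples
    rate_mean = numpy.full(rate.size, numpy.mean(rate))    # -> [12., 12., 12.]
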
@@ -257,18 +281,20 @@ if __name__ == "__main__":
pyplot.grid(True)
pyplot.xlabel('Iteration')
pyplot.ylabel('Rate (MB/s)')
- pyplot.plot(rate)
+ pyplot.plot(rate, label='Rate')
#pyplot.plot(rate_average)
- pyplot.plot(rate_mean)
+ pyplot.plot(rate_mean, label='Mean')
+ pyplot.legend(('Rate', 'Mean'))
pyplot.subplot(212)
pyplot.grid(True)
pyplot.xlabel('Iteration')
pyplot.ylabel('Time to Write')
- pyplot.plot(elapsed)
+ pyplot.plot(elapsed, label='Elapsed')
#pyplot.plot(elapsed_average)
- pyplot.plot(elapsed_mean)
+ pyplot.plot(elapsed_mean, label='Mean')
+ pyplot.legend(('Elapsed', 'Mean'))
pyplot.savefig(os.path.join(sequence_test.path_results, string.join([k, str(sequence_test.size)], sep='_')))
pyplot.close()
-
+
\ No newline at end of file
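
For reference, the new command-line front end added above behaves roughly as follows (a standalone sketch; the parse_args list stands in for a real invocation such as ./benchy.py -p 25 -s 4 -s 8). One detail worth noting: because --steps combines action='append' with default=[1], explicitly passed sizes are appended after the default 1 rather than replacing it.

    import argparse

    parser = argparse.ArgumentParser(description='Benchy the Benchmarker')
    parser.add_argument('-p', '--passes', default=10, type=int)
    parser.add_argument('-s', '--steps', default=[1], action='append', type=int)

    # simulating "./benchy.py -p 25 -s 4 -s 8"
    args = vars(parser.parse_args(['-p', '25', '-s', '4', '-s', '8']))
    print(args['passes'])   # 25
    print(args['steps'])    # [1, 4, 8] -- the default 1 stays and -s values are appended
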