aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorCarl Friedrich Bolz <cfbolz@gmx.de>2008-01-27 13:17:10 +0000
committerCarl Friedrich Bolz <cfbolz@gmx.de>2008-01-27 13:17:10 +0000
commitd968ae6f7991f91297bf69c2df151fb07011ed24 (patch)
treecdd2d3cd9653e28f3a32ecb9c4c1f9f96242ef1a /pypy/jit/conftest.py
parentkill the JIT dir to restore it to its old state (diff)
downloadpypy-d968ae6f7991f91297bf69c2df151fb07011ed24.tar.gz
pypy-d968ae6f7991f91297bf69c2df151fb07011ed24.tar.bz2
pypy-d968ae6f7991f91297bf69c2df151fb07011ed24.zip
restore the jit dir to the state before Maciek's branch
Diffstat (limited to 'pypy/jit/conftest.py')
-rw-r--r--pypy/jit/conftest.py72
1 file changed, 72 insertions, 0 deletions
diff --git a/pypy/jit/conftest.py b/pypy/jit/conftest.py
new file mode 100644
index 0000000000..e66b75d5bb
--- /dev/null
+++ b/pypy/jit/conftest.py
@@ -0,0 +1,72 @@
+import py, time, os
+
+# Usage: py.test --benchmark
+#
+# This module provides an RPython class to use as follows:
+#
+# bench = Benchmark()
+# while 1:
+# do_something
+# if bench.stop():
+# break
+
# Shortcut to py.test's command-line Option factory.
Option = py.test.config.Option

# Register the --benchmark flag; Benchmark.ENABLED below reads
# option.benchmark, so benchmarked loops run only once unless the
# flag is given on the py.test command line.
option = py.test.config.addoptions("pypy options",
        Option('--benchmark', action="store_true",
               dest="benchmark", default=False,
               help="give benchmarks in tests that support it"),
    )
+
+
class Benchmark(object):
    """Helper for benchmarked test loops (see module header for usage).

    Without py.test's --benchmark option, stop() returns True on the
    first call, so the surrounding loop body runs exactly once.  With
    the option, the loop is repeated for roughly RUN_TIME seconds and
    an iterations/second report is printed to stdout.
    """
    RUN_TIME = 2.0     # repeat the benchmarked loop for two seconds

    # Python 2 __metaclass__ trick: defines ENABLED as a property on the
    # class object itself, so Benchmark.ENABLED re-reads the command-line
    # option value at every access.
    class __metaclass__(type):
        def ENABLED(cls):
            return option.benchmark
        ENABLED = property(ENABLED)

    def __init__(self, name=''):
        # name: optional label used as a prefix of the report line
        self.name = name
        self.iterations = 0           # iterations counted since the last fold below
        self.million_iterations = 0   # overflow counter, in units of 1,000,000 iterations
        self.nextcheck = 0            # iteration count at which to sample the clock next
        self.starttime = time.time()

    def stop(self):
        """Return True when the benchmarked loop should terminate.

        Call once per loop iteration.  Samples time.time() only at
        geometrically spaced iteration counts to keep per-iteration
        overhead low; prints the report before returning True.
        """
        iterations = self.iterations = self.iterations + 1
        if not Benchmark.ENABLED:     # only run once if not benchmarking
            return True
        if iterations < self.nextcheck:
            return False    # continue, don't call time.time() too often
        now = time.time()
        if now - self.starttime < self.RUN_TIME:
            if iterations > 1000000:  # avoid wrap-around trouble
                # fold a million iterations into the separate counter so
                # self.iterations stays small (RPython-friendly ints)
                self.million_iterations += 1
                self.iterations -= 1000000
                self.nextcheck = 200000
            else:
                # grow the check interval by ~25% each time
                self.nextcheck = iterations * 5 // 4
            return False    # continue looping
        self.endtime = now
        self.print_report()
        return True

    def print_report(self):
        """Write '{[name: ]N iterations/second}' to stdout (fd 1)."""
        elapsed = self.endtime - self.starttime
        # total iterations = folded millions + remainder, as a float
        iterations = float(self.million_iterations) * 1000000.0
        iterations += self.iterations
        prefix = self.name
        if prefix:
            prefix += ': '
        result = iterations / elapsed
        if result <= 1000:
            s = '%f' % result
        else:
            s = '%d' % int(result)
            # insert "'" as a thousands separator, right to left
            i = len(s)-3
            while i > 0:
                s = s[:i] + "'" + s[i:]
                i -= 3
        # os.write bypasses stdout buffering; on Python 2 a str is valid here
        os.write(1, '{%s%s iterations/second}\n' % (prefix, s))