Author: burton
Date: Wed Feb 16 11:13:16 2005
New Revision: 154067
URL: http://svn.apache.org/viewcvs?view=rev&rev=154067
Log:
init
Added:
jakarta/commons/sandbox/benchmark/
jakarta/commons/sandbox/benchmark/trunk/
jakarta/commons/sandbox/benchmark/trunk/src/
jakarta/commons/sandbox/benchmark/trunk/src/java/
jakarta/commons/sandbox/benchmark/trunk/src/java/org/
jakarta/commons/sandbox/benchmark/trunk/src/java/org/apache/
jakarta/commons/sandbox/benchmark/trunk/src/java/org/apache/commons/
jakarta/commons/sandbox/benchmark/trunk/src/java/org/apache/commons/benchmark/
jakarta/commons/sandbox/benchmark/trunk/src/java/org/apache/commons/benchmark/Benchmark.java
Added:
jakarta/commons/sandbox/benchmark/trunk/src/java/org/apache/commons/benchmark/Benchmark.java
URL: http://svn.apache.org/viewcvs/jakarta/commons/sandbox/benchmark/trunk/src/java/org/apache/commons/benchmark/Benchmark.java?view=auto&rev=154067
==============================================================================
--- jakarta/commons/sandbox/benchmark/trunk/src/java/org/apache/commons/benchmark/Benchmark.java (added)
+++ jakarta/commons/sandbox/benchmark/trunk/src/java/org/apache/commons/benchmark/Benchmark.java Wed Feb 16 11:13:16 2005
@@ -0,0 +1,299 @@
+/*
+ * Copyright 1999,2004 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.commons.benchmark;
+
+import java.util.*;
+
+/**
+ * Benchmark that allows cheap and lightweight "benchmarking" (go figure) of
+ * arbitrary code. All you have to do is call start() every time a method
+ * starts; this increments the benchmark and performs any operations necessary
+ * to maintain it. Just call complete() when your method is done.
+ *
+ * This class is lightweight (it only requires a hashmap entry and 32 bytes of
+ * storage per benchmark) with no external requirements. This class is also
+ * threadsafe, so it can safely be called from multithreaded code.
+ *
+ * The benchmark is maintained as the number of starts and completes per minute.
+ * This can be any type of operation you want. Technically the interval can be
+ * longer than a minute but we will end up with stale data. That's the tradeoff
+ * with this type of benchmark. It's cheap and easy to maintain, but anything
+ * more than 60 seconds' worth of data will leave you with a stale benchmark.
+ *
+ * Internally we use an incremented value which is accumulated and reset every 60
+ * seconds. When we reset the benchmark we reset the current value so that we
+ * can start accumulating again.
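+ *
+ * For example, if start() is called 500 times during one 60 second interval, the
+ * first start() or complete() call after that interval ends copies the counts
+ * into the total counters (getTotalStarted()/getTotalCompleted()) and resets the
+ * current counters to zero.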
+ *
+ * <code>
+ *
+ * Benchmark benchmark = Benchmark.getBenchmark( "foo" );
+ *
+ * try {
+ *
+ * benchmark.start();
+ *
+ * //do something expensive
+ *
+ * } finally {
+ * benchmark.complete();
+ * }
+ *
+ * </code>
+ *
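+ * getBenchmark() can also be keyed off a class, with per-method children derived
+ * via child(). For example (Foo and doSomething are placeholder names):
+ *
+ * <code>
+ *
+ * Benchmark classBenchmark = Benchmark.getBenchmark( Foo.class );
+ * Benchmark methodBenchmark = classBenchmark.child( "doSomething" );
+ *
+ * try {
+ *
+ *     methodBenchmark.start();
+ *
+ *     //do something expensive
+ *
+ * } finally {
+ *     methodBenchmark.complete();
+ * }
+ *
+ * </code>
+ *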
+ * The method overhead is very light. On a modern machine you can perform about
+ * 1M benchmarks per second. For code that's only called a few thousand times
+ * you won't notice any performance overhead.
+ *
+ * @author <a href="mailto:[EMAIL PROTECTED]">Kevin Burton</a>
+ * @version $Id: Benchmark.java,v 1.3 2005/02/16 02:28:09 burton Exp $
+ */
+public class Benchmark {
+
+ /*
+
+ TODO:
+
+    - Ability to enable logging with log4j and debug() so that during every
+      rotate of the benchmarks I will get a log.info() message. These should be
+      log4j categories so that I can do benchmark.ksa.om.ArticlePeer.hasArticle
+      and then enable benchmarks via a log4j configurator.
+
+    - Potential bug. If we don't log anything for > 5 minutes the LAST benchmark
+      will be rotated. I actually have to check if the last benchmark was more
+      than INTERVAL ago and if so then delete the last benchmark.
+
+    - Potential bug. When we don't call start/complete for a long period of
+      time we would never log our status.
+
+    - It might actually be BETTER to combine the last benchmark with the current
+      benchmark but prorate the current value and then avg() them. This would
+      give us a more realistic "ETA" style benchmark of current performance.
+      For stats which don't update very much or for 5 minute interval stats this
+      would be perfect.
+
+ - We're going to need a config file like log4j...
+
+ */
+
+    //FIXME: how can we measure the TOTAL time we've spent between
+    //start/complete? This might be important because for tasks like SQL
+    //selects we'll need to time the duration we spend in the methods. I'm not
+    //sure it's even possible with the current API. This actually doesn't make
+    //much sense in threadtime if you think about it. For example 100 threads
+    //could be started which each take 60 seconds to finish. If they all finish
+    //at the same time we would compute that 6,000 seconds of machine time were
+    //used.
+
+ /**
+     * How often we maintain/reset the benchmark, in milliseconds.
+ */
+ public static long INTERVAL = 60 * 1000;
+
+ /**
+     * When true, disables all benchmarking; start() and complete() become
+     * no-ops and the performance overhead is essentially zero.
+ */
+ public static boolean DISABLED = false;
+
+ /**
+     * Map from benchmark name to its Benchmark instance.
+ */
+ public static HashMap benchmarks = new HashMap();
+
+ private Object MUTEX = new Object();
+
+ private String name = null;
+
+ /**
+ * The time the current benchmark was started. -1 for never started.
+ */
+ public long timestamp = -1;
+
+ /**
+     * The number of starts recorded during the current interval.
+ */
+ public long started = 0;
+
+ /**
+     * The number of completes recorded during the current interval.
+ */
+ public long completed = 0;
+
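+    /**
+     * Number of starts recorded in the last completed interval (updated on rotate).
+     */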
+ public long totalStarted = 0;
+
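+    /**
+     * Number of completes recorded in the last completed interval (updated on rotate).
+     */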
+ public long totalCompleted = 0;
+
+    // **** metadata about this benchmark ***************************************
+
+ public long getTotalStarted() {
+ return totalStarted;
+ }
+
+ public long getTotalCompleted() {
+ return totalCompleted;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+    // **** implementation code for start/complete ******************************
+
+ public Benchmark( String name ) {
+ this.name = name;
+ }
+
+ /**
+ * Reset stats if necessary.
+ *
+ * @author <a href="mailto:[EMAIL PROTECTED]">Kevin A. Burton</a>
+ */
+ private void doReset() {
+
+ long now = System.currentTimeMillis();
+
+ if ( now - timestamp > INTERVAL ) {
+
+ //need to perform a swap and save the current benchmark.
+ totalStarted = started;
+ totalCompleted = completed;
+
+ //reset the benchmark
+ timestamp = now;
+ started = 0;
+ completed = 0;
+
+ }
+
+ }
+
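+    /**
+     * Record the start of an operation, rotating the current interval first if
+     * it has expired.
+     */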
+ public void start() {
+
+ if ( DISABLED )
+ return;
+
+ //we need to synchronize on this individual metadata unit because if we
+ //didn't then another thread could come in, and corrupt our metadata
+ //about this benchmark. Since benchmarks are often performed within
+ //threads this is important.
+ synchronized( MUTEX ) {
+
+ doReset();
+ ++started;
+
+ }
+
+ }
+
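+    /**
+     * Record the completion of an operation, rotating the current interval first
+     * if it has expired.
+     */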
+ public void complete() {
+
+ if ( DISABLED )
+ return;
+
+ synchronized( MUTEX ) {
+
+ doReset();
+ ++completed;
+
+ }
+
+ }
+
+ /**
+ * Return a child benchmark of the current method. This can be used to
+ * return a benchmark for a specific method based on a benchmark for a
+ * class.
+ *
+ * @author <a href="mailto:[EMAIL PROTECTED]">Kevin A. Burton</a>
+ */
+ public Benchmark child( String name ) {
+
+ return getBenchmark( this.name + "." + name );
+
+ }
+
+    // **** static code *********************************************************
+
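+    /**
+     * Return the benchmark named after the caller's class, determined from the
+     * current stack trace.
+     */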
+ public static Benchmark getBenchmark() {
+
+ Exception e = new Exception();
+ String name = e.getStackTrace()[1].getClassName();
+ return getBenchmark( name );
+
+ }
+
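+    /**
+     * Return the benchmark named after the given class.
+     */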
+ public static Benchmark getBenchmark( Class clazz ) {
+ return getBenchmark( clazz.getName() );
+ }
+
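+    /**
+     * Return the benchmark with the given name, creating and registering it on
+     * first use.
+     */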
+ public static Benchmark getBenchmark( String name ) {
+
+ synchronized( benchmarks ) {
+
+ Benchmark benchmark = (Benchmark)benchmarks.get( name );
+
+ if ( benchmark == null ) {
+                benchmark = new Benchmark( name );
+ benchmarks.put( name, benchmark );
+ }
+
+ return benchmark;
+
+ }
+
+ }
+
+    // **** test code ***********************************************************
+
+ public static void main( String[] args ) throws Exception {
+
+        //FIXME: we should also support benchmarking of code regions and whether
+        //we're IN a code region. That would be interesting too. Have a bunch
+        //of start() complete() methods and then I could benchmark how many are open
+        //at a given time.
+        //
+        // This would support total number of completed operations and total
+        // number of pending operations within a given timeframe.
+
+ //simple testing framework
+
+ INTERVAL = 10 * 1000;
+
+ Benchmark benchmark = getBenchmark( Benchmark.class );
+ benchmark = benchmark.child( "main" );
+
+ System.out.println( "name: " + benchmark.getName() );
+
+ for ( int i = 0; i < 60; ++i ) {
+
+ benchmark.start();
+
+ //do something slow...
+ Thread.sleep( INTERVAL + 10 );
+
+ benchmark.complete();
+
+ System.out.println( "started: " + benchmark.getTotalStarted() );
+            System.out.println( "completed: " + benchmark.getTotalCompleted() );
+
+ }
+
+ }
+
+}