Author: edwardyoon
Date: Mon Jul 28 21:33:49 2008
New Revision: 680624
URL: http://svn.apache.org/viewvc?rev=680624&view=rev
Log:
Upgrade dependencies (hadoop-0.17.0 -> hadoop-0.17.1, hbase-1.0 -> hbase-2.0
trunk)
Added:
incubator/hama/trunk/lib/hadoop-0.17.1-core.jar (with props)
incubator/hama/trunk/lib/hadoop-0.17.1-test.jar (with props)
incubator/hama/trunk/lib/hbase-0.2.0-test.jar (with props)
incubator/hama/trunk/lib/hbase-0.2.0.jar (with props)
incubator/hama/trunk/src/java/org/apache/hama/HamaConstants.java
incubator/hama/trunk/src/java/overview.html
Removed:
incubator/hama/trunk/lib/hadoop-0.17.0-dev.2008-03-04_15-19-00-core.jar
incubator/hama/trunk/lib/hadoop-0.17.0-dev.2008-03-04_15-19-00-test.jar
incubator/hama/trunk/lib/hbase-0.2.0-dev-test.jar
incubator/hama/trunk/lib/hbase-0.2.0-dev.jar
incubator/hama/trunk/src/java/org/apache/hama/Constants.java
incubator/hama/trunk/src/java/org/apache/hama/FractionMatrix.java
incubator/hama/trunk/src/java/org/apache/hama/TriangularMatrix.java
incubator/hama/trunk/src/java/org/apache/hama/algebra/
incubator/hama/trunk/src/java/org/apache/hama/mapred/
Modified:
incubator/hama/trunk/ (props changed)
incubator/hama/trunk/src/examples/org/apache/hama/examples/MatrixMultiplication.java
incubator/hama/trunk/src/java/org/apache/hama/AbstractMatrix.java
incubator/hama/trunk/src/java/org/apache/hama/FeatureVector.java
incubator/hama/trunk/src/java/org/apache/hama/Matrix.java
incubator/hama/trunk/src/java/org/apache/hama/MatrixInterface.java
incubator/hama/trunk/src/java/org/apache/hama/RandomVariable.java
incubator/hama/trunk/src/test/org/apache/hama/HamaTestCase.java
incubator/hama/trunk/src/test/org/apache/hama/TestFeatureVector.java
incubator/hama/trunk/src/test/org/apache/hama/TestMatrix.java
incubator/hama/trunk/src/test/org/apache/hama/TestRandomVariable.java
incubator/hama/trunk/src/test/org/apache/hama/mapred/TestMatrixMapReduce.java
Propchange: incubator/hama/trunk/
------------------------------------------------------------------------------
--- svn:ignore (original)
+++ svn:ignore Mon Jul 28 21:33:49 2008
@@ -1,3 +1,4 @@
-.project
-.classpath
-build
+.project
+.classpath
+build
+.fbprefs
Added: incubator/hama/trunk/lib/hadoop-0.17.1-core.jar
URL:
http://svn.apache.org/viewvc/incubator/hama/trunk/lib/hadoop-0.17.1-core.jar?rev=680624&view=auto
==============================================================================
Binary file - no diff available.
Propchange: incubator/hama/trunk/lib/hadoop-0.17.1-core.jar
------------------------------------------------------------------------------
svn:mime-type = application/octet-stream
Added: incubator/hama/trunk/lib/hadoop-0.17.1-test.jar
URL:
http://svn.apache.org/viewvc/incubator/hama/trunk/lib/hadoop-0.17.1-test.jar?rev=680624&view=auto
==============================================================================
Binary file - no diff available.
Propchange: incubator/hama/trunk/lib/hadoop-0.17.1-test.jar
------------------------------------------------------------------------------
svn:mime-type = application/octet-stream
Added: incubator/hama/trunk/lib/hbase-0.2.0-test.jar
URL:
http://svn.apache.org/viewvc/incubator/hama/trunk/lib/hbase-0.2.0-test.jar?rev=680624&view=auto
==============================================================================
Binary file - no diff available.
Propchange: incubator/hama/trunk/lib/hbase-0.2.0-test.jar
------------------------------------------------------------------------------
svn:mime-type = application/octet-stream
Added: incubator/hama/trunk/lib/hbase-0.2.0.jar
URL:
http://svn.apache.org/viewvc/incubator/hama/trunk/lib/hbase-0.2.0.jar?rev=680624&view=auto
==============================================================================
Binary file - no diff available.
Propchange: incubator/hama/trunk/lib/hbase-0.2.0.jar
------------------------------------------------------------------------------
svn:mime-type = application/octet-stream
Modified:
incubator/hama/trunk/src/examples/org/apache/hama/examples/MatrixMultiplication.java
URL:
http://svn.apache.org/viewvc/incubator/hama/trunk/src/examples/org/apache/hama/examples/MatrixMultiplication.java?rev=680624&r1=680623&r2=680624&view=diff
==============================================================================
---
incubator/hama/trunk/src/examples/org/apache/hama/examples/MatrixMultiplication.java
(original)
+++
incubator/hama/trunk/src/examples/org/apache/hama/examples/MatrixMultiplication.java
Mon Jul 28 21:33:49 2008
@@ -26,6 +26,7 @@
public class MatrixMultiplication {
public static void main(String[] args) {
+ /*
if (args.length < 3) {
System.out.println("multiplication <map_num> <row_m> <column_n>");
System.exit(-1);
@@ -55,6 +56,7 @@
a.clear();
b.clear();
c.clear();
+ */
}
public static String executeTime(long start, long end) {
Modified: incubator/hama/trunk/src/java/org/apache/hama/AbstractMatrix.java
URL:
http://svn.apache.org/viewvc/incubator/hama/trunk/src/java/org/apache/hama/AbstractMatrix.java?rev=680624&r1=680623&r2=680624&view=diff
==============================================================================
--- incubator/hama/trunk/src/java/org/apache/hama/AbstractMatrix.java (original)
+++ incubator/hama/trunk/src/java/org/apache/hama/AbstractMatrix.java Mon Jul
28 21:33:49 2008
@@ -20,9 +20,6 @@
package org.apache.hama;
import java.io.IOException;
-import java.util.Map;
-import java.util.SortedMap;
-import java.util.TreeMap;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -33,15 +30,15 @@
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.io.BatchUpdate;
import org.apache.hadoop.hbase.io.Cell;
+import org.apache.hadoop.hbase.io.RowResult;
+import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
-import org.apache.hama.mapred.MatrixMapReduce;
import org.apache.log4j.Logger;
/**
* Methods of the matrix classes
*/
-public abstract class AbstractMatrix extends MatrixMapReduce implements
- MatrixInterface {
+public abstract class AbstractMatrix implements MatrixInterface {
static final Logger LOG = Logger.getLogger(AbstractMatrix.class);
/** Hbase Configuration */
@@ -80,7 +77,8 @@
*/
protected void create() {
try {
- tableDesc.addFamily(new
HColumnDescriptor(Constants.METADATA.toString()));
+ tableDesc.addFamily(new HColumnDescriptor(HamaConstants.METADATA
+ .toString()));
LOG.info("Initializaing.");
admin.createTable(tableDesc);
} catch (IOException e) {
@@ -92,35 +90,36 @@
public int getRowDimension() {
Cell rows = null;
try {
- rows = table.get(Constants.METADATA, Constants.METADATA_ROWS);
+ rows = table.get(HamaConstants.METADATA, HamaConstants.METADATA_ROWS);
} catch (IOException e) {
LOG.error(e, e);
}
- return bytesToInt(rows.getValue());
+ return Bytes.toInt(rows.getValue());
}
/** {@inheritDoc} */
public int getColumnDimension() {
Cell columns = null;
try {
- columns = table.get(Constants.METADATA, Constants.METADATA_COLUMNS);
+ columns = table.get(HamaConstants.METADATA,
+ HamaConstants.METADATA_COLUMNS);
} catch (IOException e) {
LOG.error(e, e);
}
- return bytesToInt(columns.getValue());
+ return Bytes.toInt(columns.getValue());
}
/** {@inheritDoc} */
public double get(int i, int j) {
Text row = new Text(String.valueOf(i));
- Text column = new Text(Constants.COLUMN + String.valueOf(j));
+ Text column = new Text(HamaConstants.COLUMN + String.valueOf(j));
Cell c;
double result = -1;
try {
c = table.get(row, column);
if (c != null) {
- result = toDouble(c.getValue());
+ result = bytesToDouble(c.getValue());
}
} catch (IOException e) {
LOG.error(e, e);
@@ -128,25 +127,37 @@
return result;
}
+ public double bytesToDouble(byte[] b) {
+ return Double.parseDouble(Bytes.toString(b));
+ }
+
+ public byte[] doubleToBytes(Double d) {
+ return Bytes.toBytes(d.toString());
+ }
+
/** {@inheritDoc} */
- public FeatureVector getRowVector(int row) {
+ public RowResult getRowResult(byte[] row) {
try {
- SortedMap<Integer, Double> result = new TreeMap<Integer, Double>();
- for (Map.Entry<Text, Cell> f : table
- .getRow(new Text(String.valueOf(row))).entrySet()) {
- result.put(getIndex(f.getKey()), toDouble(f.getValue().getValue()));
- }
- return new FeatureVector(result);
+ return table.getRow(row);
} catch (IOException e) {
e.printStackTrace();
- return null;
}
+ return null;
}
+ public RowResult getRowResult(int row) {
+ try {
+ return table.getRow(String.valueOf(row).getBytes());
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ return null;
+ }
+
/** {@inheritDoc} */
public void set(int i, int j, double d) {
BatchUpdate b = new BatchUpdate(new Text(String.valueOf(i)));
- b.put(new Text(Constants.COLUMN + String.valueOf(j)), toBytes(d));
+ b.put(new Text(HamaConstants.COLUMN + String.valueOf(j)),
doubleToBytes(d));
try {
table.commit(b);
} catch (IOException e) {
@@ -176,9 +187,9 @@
/** {@inheritDoc} */
public void setDimension(int rows, int columns) {
- BatchUpdate b = new BatchUpdate(Constants.METADATA);
- b.put(Constants.METADATA_ROWS, intToBytes(rows));
- b.put(Constants.METADATA_COLUMNS, intToBytes(columns));
+ BatchUpdate b = new BatchUpdate(HamaConstants.METADATA);
+ b.put(HamaConstants.METADATA_ROWS, Bytes.toBytes(rows));
+ b.put(HamaConstants.METADATA_COLUMNS, Bytes.toBytes(columns));
try {
table.commit(b);
@@ -199,9 +210,9 @@
*/
public double getDeterminant() {
try {
- return toDouble(table.get(
- new Text(String.valueOf(Constants.DETERMINANT)),
- new Text(Constants.COLUMN)).getValue());
+ return bytesToDouble(table.get(
+ new Text(String.valueOf(HamaConstants.DETERMINANT)),
+ new Text(HamaConstants.COLUMN)).getValue());
} catch (IOException e) {
LOG.error(e, e);
return -1;
@@ -234,4 +245,16 @@
LOG.error(e, e);
}
}
+
+ /**
+ * Return the integer column index
+ *
+ * @param b key
+ * @return integer
+ */
+ public int getColumnIndex(byte[] b) {
+ String cKey = new String(b);
+ return Integer.parseInt(cKey
+ .substring(cKey.indexOf(":") + 1, cKey.length()));
+ }
}
Modified: incubator/hama/trunk/src/java/org/apache/hama/FeatureVector.java
URL:
http://svn.apache.org/viewvc/incubator/hama/trunk/src/java/org/apache/hama/FeatureVector.java?rev=680624&r1=680623&r2=680624&view=diff
==============================================================================
--- incubator/hama/trunk/src/java/org/apache/hama/FeatureVector.java (original)
+++ incubator/hama/trunk/src/java/org/apache/hama/FeatureVector.java Mon Jul 28
21:33:49 2008
@@ -19,9 +19,16 @@
*/
package org.apache.hama;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
import java.util.Map;
import java.util.SortedMap;
+import java.util.TreeMap;
+import org.apache.hadoop.hbase.io.Cell;
+import org.apache.hadoop.hbase.io.HbaseMapWritable;
+import org.apache.hadoop.hbase.io.RowResult;
+import org.apache.hadoop.hbase.util.Bytes;
import org.apache.log4j.Logger;
/**
@@ -35,13 +42,27 @@
static final Logger LOG = Logger.getLogger(FeatureVector.class);
protected int[] m_dims;
protected double[] m_vals;
+ protected SortedMap<Integer, Double> vector;
- public FeatureVector(SortedMap<Integer, Double> result) {
- this.m_dims = new int[result.keySet().size()];
- this.m_vals = new double[result.keySet().size()];
+ public FeatureVector(RowResult r) {
+ SortedMap<Integer, Double> m = new TreeMap<Integer, Double>();
+
+ for (Map.Entry<byte[], Cell> f : r.entrySet()) {
+ m.put(getColumnIndex(f.getKey()),
Double.parseDouble(Bytes.toString(f.getValue().getValue())));
+ }
+ parse(m);
+ }
+
+ public FeatureVector(SortedMap<Integer, Double> m) {
+ parse(m);
+ }
+
+ public void parse(SortedMap<Integer, Double> m) {
+ this.m_dims = new int[m.keySet().size()];
+ this.m_vals = new double[m.keySet().size()];
int i = 0;
- for (Map.Entry<Integer, Double> f : result.entrySet()) {
+ for (Map.Entry<Integer, Double> f : m.entrySet()) {
this.m_dims[i] = f.getKey();
this.m_vals[i] = f.getValue();
i++;
@@ -99,4 +120,74 @@
return m_dims.length;
}
+ public FeatureVector addition(FeatureVector v2) {
+ SortedMap<Integer, Double> v3 = new TreeMap<Integer, Double>();
+ if (this.size() == v2.size()) {
+ for (int i = 0; i < this.size(); i++) {
+ LOG.info("Addition: " + this.getValueAt(i) + ", " + v2.getValueAt(i));
+ double value = (this.getValueAt(i) + v2.getValueAt(i));
+ v3.put(i, value);
+ }
+
+ return new FeatureVector(v3);
+ } else {
+ return null;
+ }
+ }
+
+ /**
+ * Return the integer column index
+ *
+ * @param b key
+ * @return integer
+ */
+ public int getColumnIndex(byte[] b) {
+ String cKey = new String(b);
+ return Integer.parseInt(cKey
+ .substring(cKey.indexOf(":") + 1, cKey.length()));
+ }
+
+ /**
+ * Converts the bytes to double
+ *
+ * @param inBytes
+ * @return double
+ */
+ public double toDouble(byte[] inBytes) {
+ if (inBytes == null) {
+ return 0;
+ }
+
+ long n = 0;
+ for (int i = 0; i < inBytes.length; i++) {
+ n |= ((long) (inBytes[i] & 0377) << (i * 8));
+ }
+
+ double doubleValue = Double.longBitsToDouble(n);
+
+ return doubleValue;
+ }
+
+ /**
+ * Converts the int to byte array
+ *
+ * @param i
+ * @return Byte Array
+ */
+ public byte[] intToBytes(int i) {
+ ByteBuffer bb = ByteBuffer.allocate(4);
+ bb.order(ByteOrder.nativeOrder());
+ bb.putInt(i);
+ return bb.array();
+ }
+
+ public RowResult getRowResult(byte[] row) {
+ HbaseMapWritable<byte[], Cell> trunk = new HbaseMapWritable<byte[],
Cell>();
+ for (int i = 0; i < this.size(); i++) {
+ Cell cValue = new Cell(String.valueOf(this.getValueAt(i)), 0);
+ trunk.put(Bytes.toBytes("column:" + i), cValue);
+ }
+
+ return new RowResult(row, trunk);
+ }
}
Added: incubator/hama/trunk/src/java/org/apache/hama/HamaConstants.java
URL:
http://svn.apache.org/viewvc/incubator/hama/trunk/src/java/org/apache/hama/HamaConstants.java?rev=680624&view=auto
==============================================================================
--- incubator/hama/trunk/src/java/org/apache/hama/HamaConstants.java (added)
+++ incubator/hama/trunk/src/java/org/apache/hama/HamaConstants.java Mon Jul 28
21:33:49 2008
@@ -0,0 +1,62 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hama;
+
+import org.apache.hadoop.io.Text;
+
+/**
+ * Some constants used in the hama
+ */
+public class HamaConstants {
+ /** Meta-columnfamily to store the matrix-info */
+ public final static Text METADATA = new Text("metadata:");
+ /** The number of the matrix rows */
+ public final static Text METADATA_ROWS = new Text("metadata:rows");
+ /** The number of the matrix columns */
+ public final static Text METADATA_COLUMNS = new Text("metadata:columns");
+ /** The type of the matrix */
+ public final static Text METADATA_TYPE = new Text("metadata:type");
+
+ /** plus operator */
+ public final static String PLUS = "+";
+ /** minus operator */
+ public final static String MINUS = "-";
+
+ /** Default columnfamily name */
+ public final static Text COLUMN = new Text("column:");
+ /** The numerator version of the fraction matrix */
+ public final static Text NUMERATOR = new Text("numerator:");
+ /** The denominator version of the fraction matrix */
+ public final static Text DENOMINATOR = new Text("denominator:");
+ /** The original version of the fraction matrix */
+ public final static Text ORIGINAL = new Text("original:");
+ /** The lower matrix version of the triangular matrix */
+ public final static Text LOWER = new Text("lower:");
+ /** The upper matrix version of the triangular matrix */
+ public final static Text UPPER = new Text("upper:");
+
+ /** A determinant value record */
+ public final static Text DETERMINANT = new Text("determinant");
+
+ /** Temporary random matrices name-head */
+ public final static String RANDOM = "rand";
+ /** Temporary result matrices name-head */
+ public final static String RESULT = "result";
+}
Modified: incubator/hama/trunk/src/java/org/apache/hama/Matrix.java
URL:
http://svn.apache.org/viewvc/incubator/hama/trunk/src/java/org/apache/hama/Matrix.java?rev=680624&r1=680623&r2=680624&view=diff
==============================================================================
--- incubator/hama/trunk/src/java/org/apache/hama/Matrix.java (original)
+++ incubator/hama/trunk/src/java/org/apache/hama/Matrix.java Mon Jul 28
21:33:49 2008
@@ -27,18 +27,6 @@
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hama.algebra.AdditionSubtractionMap;
-import org.apache.hama.algebra.AdditionSubtractionReduce;
-import org.apache.hama.algebra.CholeskyDecompositionMap;
-import org.apache.hama.algebra.CroutDecompositionMap;
-import org.apache.hama.algebra.DeterminantMap;
-import org.apache.hama.algebra.DeterminantReduce;
-import org.apache.hama.algebra.MultiplicationMap;
-import org.apache.hama.algebra.MultiplicationReduce;
-import org.apache.hama.mapred.MatrixInputFormat;
-import org.apache.hama.mapred.MatrixOutputFormat;
/**
* A library for mathematical operations on matrices of double.
@@ -67,7 +55,8 @@
if (!admin.tableExists(matrixName)) {
tableDesc = new HTableDescriptor(matrixName.toString());
- tableDesc.addFamily(new
HColumnDescriptor(Constants.COLUMN.toString()));
+ tableDesc.addFamily(new HColumnDescriptor(HamaConstants.COLUMN
+ .toString()));
create();
}
@@ -89,11 +78,12 @@
public Matrix(HBaseConfiguration conf, int m, int n, double s) {
try {
setConfiguration(conf);
- matrixName = new Text(Constants.RANDOM + System.currentTimeMillis());
+ matrixName = RandomVariable.randMatrixName();
if (!admin.tableExists(matrixName)) {
tableDesc = new HTableDescriptor(matrixName.toString());
- tableDesc.addFamily(new
HColumnDescriptor(Constants.COLUMN.toString()));
+ tableDesc.addFamily(new HColumnDescriptor(HamaConstants.COLUMN
+ .toString()));
create();
}
@@ -120,8 +110,7 @@
* @return an m-by-n matrix with uniformly distributed random elements.
*/
public static Matrix random(Configuration conf, int m, int n) {
- Text name = new Text(Constants.RANDOM + System.currentTimeMillis());
- Matrix rand = new Matrix(conf, name);
+ Matrix rand = new Matrix(conf, RandomVariable.randMatrixName());
for (int i = 0; i < m; i++) {
for (int j = 0; j < n; j++) {
rand.set(i, j, RandomVariable.rand());
@@ -129,7 +118,6 @@
}
rand.setDimension(m, n);
-
return rand;
}
@@ -148,156 +136,76 @@
/** {@inheritDoc} */
public Matrix multiply(Matrix b) {
- String output = Constants.RESULT + System.currentTimeMillis();
- Matrix c = new Matrix(config, new Text(output));
-
- JobConf jobConf = new JobConf(config);
- jobConf.setJobName("parallel matrix multiplication of " + getName() + "
and " + b.getName());
- jobConf.setInputFormat(MatrixInputFormat.class);
- jobConf.setOutputFormat(MatrixOutputFormat.class);
- MultiplicationMap.initJob(getName(), b.getName(), MultiplicationMap.class,
jobConf);
- MultiplicationReduce.initJob(output, MultiplicationReduce.class, jobConf);
-
- jobConf.setNumMapTasks(mapper);
- jobConf.setNumReduceTasks(reducer);
-
- try {
- JobClient.runJob(jobConf);
- } catch (IOException e) {
- LOG.info(e);
- }
-
- return c;
+ /*
+ * String output = Constants.RESULT + System.currentTimeMillis(); Matrix c
=
+ * new Matrix(config, new Text(output)); JobConf jobConf = new
+ * JobConf(config); jobConf.setJobName("parallel matrix multiplication of
" +
+ * getName() + " and " + b.getName());
+ * jobConf.setInputFormat(MatrixInputFormat.class);
+ * jobConf.setOutputFormat(MatrixOutputFormat.class);
+ * MultiplicationMap.initJob(getName(), b.getName(),
+ * MultiplicationMap.class, jobConf); MultiplicationReduce.initJob(output,
+ * MultiplicationReduce.class, jobConf); jobConf.setNumMapTasks(mapper);
+ * jobConf.setNumReduceTasks(reducer); try { JobClient.runJob(jobConf); }
+ * catch (IOException e) { LOG.info(e); } return c;
+ */
+ return null;
}
/** {@inheritDoc} */
public Matrix addition(Matrix b) {
- return additionSubtraction(b, Constants.PLUS);
+ return additionSubtraction(b, HamaConstants.PLUS);
}
/** {@inheritDoc} */
public Matrix subtraction(Matrix b) {
- return additionSubtraction(b, Constants.PLUS);
+ return additionSubtraction(b, HamaConstants.PLUS);
}
/**
- * Method for add or subtract operation
+ * Method for add or subtract operation
*
* @param target
* @param operator
* @return matrix
- */
+ */
public Matrix additionSubtraction(Matrix target, String operator) {
- String b = target.getName();
- String output = Constants.RESULT + System.currentTimeMillis();
- Matrix c = new Matrix(config, new Text(output));
- String jobName = "parallel matrix " + operator + " of " + getName() + "
and " + b;
- LOG.info(jobName);
-
- JobConf jobConf = new JobConf(config);
- jobConf.setJobName(jobName);
- jobConf.setInputFormat(MatrixInputFormat.class);
- jobConf.setOutputFormat(MatrixOutputFormat.class);
- AdditionSubtractionMap.initJob(getName(), b, operator,
- AdditionSubtractionMap.class, jobConf);
- AdditionSubtractionReduce.initJob(output,
- AdditionSubtractionReduce.class, jobConf);
-
- jobConf.setNumMapTasks(mapper);
- jobConf.setNumReduceTasks(reducer);
-
- try {
- JobClient.runJob(jobConf);
- } catch (IOException e) {
- LOG.info(e);
- }
-
- return c;
+ /*
+ * String b = target.getName(); String output = Constants.RESULT +
+ * System.currentTimeMillis(); Matrix c = new Matrix(config, new
+ * Text(output)); String jobName = "parallel matrix " + operator + " of " +
+ * getName() + " and " + b; LOG.info(jobName); JobConf jobConf = new
+ * JobConf(config); jobConf.setJobName(jobName);
+ * jobConf.setInputFormat(MatrixInputFormat.class);
+ * jobConf.setOutputFormat(MatrixOutputFormat.class);
+ * AdditionSubtractionMap.initJob(getName(), b, operator,
+ * AdditionSubtractionMap.class, jobConf);
+ * AdditionSubtractionReduce.initJob(output,
+ * AdditionSubtractionReduce.class, jobConf);
+ * jobConf.setNumMapTasks(mapper); jobConf.setNumReduceTasks(reducer); try
{
+ * JobClient.runJob(jobConf); } catch (IOException e) { LOG.info(e); }
+ * return c;
+ */
+ return null;
}
/** {@inheritDoc} */
public double determinant() {
- JobConf jobConf = new JobConf(config);
- jobConf.setJobName("matrix determinant");
-
- String check = Constants.RESULT + System.currentTimeMillis();
- Matrix c = new Matrix(config, new Text(check));
- for (int i = 0; i < getRowDimension(); i++) {
- c.set(i, 0, 1.0);
- }
- c.setDimension(getRowDimension(), 0);
- jobConf.setInputFormat(MatrixInputFormat.class);
- jobConf.setOutputFormat(MatrixOutputFormat.class);
- DeterminantMap.initJob(getName(), check, DeterminantMap.class, jobConf);
- DeterminantReduce.initJob(getName(), DeterminantReduce.class, jobConf);
-
- jobConf.setNumMapTasks(mapper);
- jobConf.setNumReduceTasks(reducer);
-
- try {
- JobClient.runJob(jobConf);
- } catch (IOException e) {
- LOG.info(e);
- }
-
- c.clear();
- return getDeterminant();
- }
-
- /** {@inheritDoc} */
- public TriangularMatrix decompose(Decomposition technique) {
- if (technique.equals(Decomposition.Cholesky))
- return choleskyDecompose();
- else if (technique.equals(Decomposition.Crout))
- return croutDecompose();
- else
- return null;
- }
-
- private TriangularMatrix croutDecompose() {
- JobConf jobConf = new JobConf(config);
- jobConf.setJobName("Crout Decomposition");
-
- String output = Constants.RESULT + System.currentTimeMillis();
- TriangularMatrix b = new TriangularMatrix(config, new Text(output));
- jobConf.setInputFormat(MatrixInputFormat.class);
- jobConf.setOutputFormat(MatrixOutputFormat.class);
- CroutDecompositionMap.initJob(getName(), output,
- CroutDecompositionMap.class, jobConf);
-
- jobConf.setNumMapTasks(mapper);
- jobConf.setNumReduceTasks(reducer);
-
- try {
- JobClient.runJob(jobConf);
- } catch (IOException e) {
- LOG.info(e);
- }
-
- return b;
- }
-
- private TriangularMatrix choleskyDecompose() {
- JobConf jobConf = new JobConf(config);
- jobConf.setJobName("Cholesky Decomposition");
-
- String output = Constants.RESULT + System.currentTimeMillis();
- TriangularMatrix b = new TriangularMatrix(config, new Text(output));
- jobConf.setInputFormat(MatrixInputFormat.class);
- jobConf.setOutputFormat(MatrixOutputFormat.class);
- CholeskyDecompositionMap.initJob(getName(), output,
- CholeskyDecompositionMap.class, jobConf);
-
- jobConf.setNumMapTasks(mapper);
- jobConf.setNumReduceTasks(reducer);
-
- try {
- JobClient.runJob(jobConf);
- } catch (IOException e) {
- LOG.info(e);
- }
-
- return b;
+ /*
+ * JobConf jobConf = new JobConf(config); jobConf.setJobName("matrix
+ * determinant"); String check = Constants.RESULT +
+ * System.currentTimeMillis(); Matrix c = new Matrix(config, new
+ * Text(check)); for (int i = 0; i < getRowDimension(); i++) { c.set(i, 0,
+ * 1.0); } c.setDimension(getRowDimension(), 0);
+ * jobConf.setInputFormat(MatrixInputFormat.class);
+ * jobConf.setOutputFormat(MatrixOutputFormat.class);
+ * DeterminantMap.initJob(getName(), check, DeterminantMap.class, jobConf);
+ * DeterminantReduce.initJob(getName(), DeterminantReduce.class, jobConf);
+ * jobConf.setNumMapTasks(mapper); jobConf.setNumReduceTasks(reducer); try
{
+ * JobClient.runJob(jobConf); } catch (IOException e) { LOG.info(e); }
+ * c.clear(); return getDeterminant();
+ */
+ return 0;
}
}
Modified: incubator/hama/trunk/src/java/org/apache/hama/MatrixInterface.java
URL:
http://svn.apache.org/viewvc/incubator/hama/trunk/src/java/org/apache/hama/MatrixInterface.java?rev=680624&r1=680623&r2=680624&view=diff
==============================================================================
--- incubator/hama/trunk/src/java/org/apache/hama/MatrixInterface.java
(original)
+++ incubator/hama/trunk/src/java/org/apache/hama/MatrixInterface.java Mon Jul
28 21:33:49 2008
@@ -19,6 +19,7 @@
*/
package org.apache.hama;
+import org.apache.hadoop.hbase.io.RowResult;
/**
* Basic matrix interface. It holds <code>double</code>s in a rectangular 2D
@@ -42,7 +43,7 @@
* @param row the row index of the matrix
* @return the feature vector of row
*/
- public FeatureVector getRowVector(int row);
+ public RowResult getRowResult(byte[] row);
/**
* Sets the double value of (i, j)
@@ -163,8 +164,7 @@
*
* @return the decomposed result
*/
- public TriangularMatrix decompose(Decomposition technique);
-
+ // public TriangularMatrix decompose(Decomposition technique);
/**
* Clear object
*/
Modified: incubator/hama/trunk/src/java/org/apache/hama/RandomVariable.java
URL:
http://svn.apache.org/viewvc/incubator/hama/trunk/src/java/org/apache/hama/RandomVariable.java?rev=680624&r1=680623&r2=680624&view=diff
==============================================================================
--- incubator/hama/trunk/src/java/org/apache/hama/RandomVariable.java (original)
+++ incubator/hama/trunk/src/java/org/apache/hama/RandomVariable.java Mon Jul
28 21:33:49 2008
@@ -19,6 +19,8 @@
*/
package org.apache.hama;
+import org.apache.hadoop.io.Text;
+
/**
* The RandomVaraibale Class provides static methods for generating random
* numbers.
@@ -49,6 +51,21 @@
}
/**
+ * Generate a random name.
+ *
+ * @return random name
+ */
+ protected static Text randMatrixName() {
+ String rName = HamaConstants.RANDOM;
+ for (int i = 1; i <= 5; i++) {
+ char ch = (char) ((Math.random() * 26) + 97);
+ rName += ch;
+ }
+
+ return new Text(rName);
+ }
+
+ /**
* Generate a random number from a uniform random variable.
*
* @param min min of the random variable.
Added: incubator/hama/trunk/src/java/overview.html
URL:
http://svn.apache.org/viewvc/incubator/hama/trunk/src/java/overview.html?rev=680624&view=auto
==============================================================================
--- incubator/hama/trunk/src/java/overview.html (added)
+++ incubator/hama/trunk/src/java/overview.html Mon Jul 28 21:33:49 2008
@@ -0,0 +1,23 @@
+<html>
+
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<body>
+The Hama Overview
+</body>
+</html>
Modified: incubator/hama/trunk/src/test/org/apache/hama/HamaTestCase.java
URL:
http://svn.apache.org/viewvc/incubator/hama/trunk/src/test/org/apache/hama/HamaTestCase.java?rev=680624&r1=680623&r2=680624&view=diff
==============================================================================
--- incubator/hama/trunk/src/test/org/apache/hama/HamaTestCase.java (original)
+++ incubator/hama/trunk/src/test/org/apache/hama/HamaTestCase.java Mon Jul 28
21:33:49 2008
@@ -29,17 +29,15 @@
public class HamaTestCase extends HBaseClusterTestCase {
static final Logger LOG = Logger.getLogger(HamaTestCase.class);
protected Matrix matrixA;
+ protected Matrix matrixB;
protected Text A = new Text("matrixA");
+ protected Text B = new Text("matrixB");
protected int SIZE = 5;
/** constructor */
public HamaTestCase() {
super();
- // TODO: We should remove this "hadoop.log.dir" path to build.xml
- System.setProperty("hadoop.log.dir",
- conf.get("hadoop.log.dir", "./build/test/logs"));
-
// Initializing the hbase configuration
conf.set("mapred.output.dir", conf.get("hadoop.tmp.dir"));
Modified: incubator/hama/trunk/src/test/org/apache/hama/TestFeatureVector.java
URL:
http://svn.apache.org/viewvc/incubator/hama/trunk/src/test/org/apache/hama/TestFeatureVector.java?rev=680624&r1=680623&r2=680624&view=diff
==============================================================================
--- incubator/hama/trunk/src/test/org/apache/hama/TestFeatureVector.java
(original)
+++ incubator/hama/trunk/src/test/org/apache/hama/TestFeatureVector.java Mon
Jul 28 21:33:49 2008
@@ -19,33 +19,60 @@
*/
package org.apache.hama;
+import org.apache.hadoop.io.Text;
+
public class TestFeatureVector extends HamaTestCase {
+ public void testAddition() {
+ matrixA = new Matrix(conf, A);
+ matrixA.set(0, 0, 2);
+ matrixA.set(0, 1, 5);
+
+ matrixB = new Matrix(conf, B);
+ matrixB.set(0, 0, 4);
+ matrixB.set(0, 1, 1);
+
+ FeatureVector v1 = new FeatureVector(matrixA.getRowResult(0));
+ FeatureVector v2 = new FeatureVector(matrixB.getRowResult(0));
+
+ FeatureVector v3 = v1.addition(v2);
+ assertTrue(v3.getValueAt(0) == 6.0);
+ assertTrue(v3.getValueAt(1) == 6.0);
+
+ LOG.info(v3.getValueAt(0));
+ LOG.info(v3.getValueAt(1));
+ matrixA.clear();
+ matrixB.clear();
+ }
/**
* Test cosine similarity
*/
public void testCosine() {
final double result = 0.6978227007909176;
- matrixA = new Matrix(conf, A);
-
- // TODO : We need setArray(int row, double[] value) to matrix
+ Matrix m1 = new Matrix(conf, new Text("cosine"));
+
+ // TODO : We need setArray(int row, double[] value) to matrix
// e.g. matrixA.setArray(0, new double[] {2,5,1,4});
// -- Edward
- matrixA.set(0, 0, 2);
- matrixA.set(0, 1, 5);
- matrixA.set(0, 2, 1);
- matrixA.set(0, 3, 4);
- matrixA.set(1, 0, 4);
- matrixA.set(1, 1, 1);
- matrixA.set(1, 2, 3);
- matrixA.set(1, 3, 3);
+ m1.set(0, 0, 2);
+ m1.set(0, 1, 5);
+ m1.set(0, 2, 1);
+ m1.set(0, 3, 4);
+
+ m1.set(1, 0, 4);
+ m1.set(1, 1, 1);
+ m1.set(1, 2, 3);
+ m1.set(1, 3, 3);
- FeatureVector v1 = matrixA.getRowVector(0);
- FeatureVector v2 = matrixA.getRowVector(1);
+ LOG.info("get test : " + m1.get(0, 0));
+ LOG.info("get test : " + m1.get(0, 1));
+
+ FeatureVector v1 = new FeatureVector(m1.getRowResult(0));
+ FeatureVector v2 = new FeatureVector(m1.getRowResult(1));
double cos = v1.getCosine(v2);
assertEquals(cos, result);
- matrixA.clear();
+ m1.close();
}
}
Modified: incubator/hama/trunk/src/test/org/apache/hama/TestMatrix.java
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/src/test/org/apache/hama/TestMatrix.java?rev=680624&r1=680623&r2=680624&view=diff
==============================================================================
--- incubator/hama/trunk/src/test/org/apache/hama/TestMatrix.java (original)
+++ incubator/hama/trunk/src/test/org/apache/hama/TestMatrix.java Mon Jul 28 21:33:49 2008
@@ -32,6 +32,7 @@
public void testRandomMatrix() {
Matrix rand = Matrix.random(conf, SIZE, SIZE);
assertTrue(rand.getRowDimension() == SIZE);
+ rand.close();
}
/**
@@ -59,21 +60,6 @@
}
/**
- * Constant matrix test
- */
- public void testConstantMatrix() {
- constantMatrixCreate();
-
- for (int i = 0; i < SIZE; i++) {
- for (int j = 0; j < SIZE; j++) {
- assertTrue(0.5 == matrixA.get(i, j));
- }
- }
-
- matrixClose();
- }
-
- /**
* Object clear
*/
public void matrixClose() {
@@ -95,20 +81,4 @@
return false;
}
}
-
- /**
- * Matrix create
- *
- * @return <code>true</code> if the matrix space was initialized.
- */
- public boolean constantMatrixCreate() {
- matrixA = new Matrix(conf, SIZE, SIZE, 0.5);
- try {
- return matrixA.admin.tableExists(A);
- } catch (MasterNotRunningException e) {
- e.printStackTrace();
- return false;
- }
- }
-
}
Modified: incubator/hama/trunk/src/test/org/apache/hama/TestRandomVariable.java
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/src/test/org/apache/hama/TestRandomVariable.java?rev=680624&r1=680623&r2=680624&view=diff
==============================================================================
--- incubator/hama/trunk/src/test/org/apache/hama/TestRandomVariable.java (original)
+++ incubator/hama/trunk/src/test/org/apache/hama/TestRandomVariable.java Mon Jul 28 21:33:49 2008
@@ -25,7 +25,7 @@
* Random variable generation test
*/
public class TestRandomVariable extends TestCase {
- final static int TEST_COUNT = 50;
+ final static int COUNT = 50;
/**
* Random object test
@@ -33,7 +33,7 @@
* @throws Exception
*/
public void testRand() throws Exception {
- for (int i = 0; i < TEST_COUNT; i++) {
+ for (int i = 0; i < COUNT; i++) {
double result = RandomVariable.rand();
assertTrue(result >= 0.0d && result <= 1.0);
}
@@ -48,7 +48,7 @@
final int min = 122;
final int max = 561;
- for (int i = 0; i < TEST_COUNT; i++) {
+ for (int i = 0; i < COUNT; i++) {
int result = RandomVariable.randInt(min, max);
assertTrue(result >= min && result <= max);
}
@@ -63,7 +63,7 @@
final double min = 1.0d;
final double max = 3.0d;
- for (int i = 0; i < TEST_COUNT; i++) {
+ for (int i = 0; i < COUNT; i++) {
double result = RandomVariable.uniform(min, max);
assertTrue(result >= min && result <= max);
}
Modified:
incubator/hama/trunk/src/test/org/apache/hama/mapred/TestMatrixMapReduce.java
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/src/test/org/apache/hama/mapred/TestMatrixMapReduce.java?rev=680624&r1=680623&r2=680624&view=diff
==============================================================================
--- incubator/hama/trunk/src/test/org/apache/hama/mapred/TestMatrixMapReduce.java (original)
+++ incubator/hama/trunk/src/test/org/apache/hama/mapred/TestMatrixMapReduce.java Mon Jul 28 21:33:49 2008
@@ -20,21 +20,24 @@
package org.apache.hama.mapred;
import java.io.IOException;
+import java.util.Iterator;
import java.util.Map;
import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HStoreKey;
+import org.apache.hadoop.hbase.io.BatchUpdate;
+import org.apache.hadoop.hbase.io.Cell;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.hbase.io.RowResult;
+import org.apache.hadoop.hbase.mapred.TableMap;
+import org.apache.hadoop.hbase.mapred.TableReduce;
import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
+import org.apache.hama.FeatureVector;
import org.apache.hama.HamaTestCase;
import org.apache.hama.Matrix;
-import org.apache.hama.algebra.AdditionSubtractionReduce;
import org.apache.log4j.Logger;
/**
@@ -42,77 +45,88 @@
*/
public class TestMatrixMapReduce extends HamaTestCase {
static final Logger LOG = Logger.getLogger(TestMatrixMapReduce.class);
- private Matrix a;
- private Matrix b;
- private Matrix c;
+ protected Matrix a;
+ protected Matrix b;
/** constructor */
public TestMatrixMapReduce() {
super();
}
- public static class AdditionMap extends MatrixMap<Text, MapWritable> {
+ public static class AdditionMap extends
+ TableMap<ImmutableBytesWritable, RowResult> {
protected Matrix matrix_b;
- public static final String MATRIX_B = "hama.addition.matrix.b";
+ public static final String MATRIX_B = "hama.addition.substraction.matrix.b";
public void configure(JobConf job) {
- Text b_name = new Text(job.get(MATRIX_B, ""));
- matrix_b = new Matrix(new HBaseConfiguration(), b_name);
+ matrix_b = new Matrix(new HBaseConfiguration(), new Text("MatrixB"));
}
- public static void initJob(String matrix_a, String matrix_b,
- Class<AdditionMap> map, JobConf jobConf) {
- initJob(matrix_a, map, jobConf);
- jobConf.set(MATRIX_B, matrix_b);
+ @Override
+ public void map(ImmutableBytesWritable key, RowResult value,
+ OutputCollector<ImmutableBytesWritable, RowResult> output,
+ Reporter reporter) throws IOException {
+
+ FeatureVector v1 = new FeatureVector(matrix_b.getRowResult(key.get()));
+ FeatureVector v2 = new FeatureVector(value);
+ FeatureVector v3 = v1.addition(v2);
+ output.collect(key, v3.getRowResult(key.get()));
+
+ LOG.info("xxx" + v3.getValueAt(0));
+ LOG.info("xxx" + v3.getValueAt(1));
}
+ }
+
+ public static class AdditionReduce extends
+ TableReduce<ImmutableBytesWritable, RowResult> {
@Override
- public void map(HStoreKey key, MapWritable value,
- OutputCollector<Text, MapWritable> output, Reporter reporter)
- throws IOException {
- Text tKey = key.getRow();
- MapWritable sum = new MapWritable();
-
- for (Map.Entry<Writable, Writable> e : value.entrySet()) {
- double a = getDouble(e.getValue());
- double b = matrix_b.get(Integer.parseInt(tKey.toString()),
- getIndex((Text) e.getKey()));
- byte[] result = null;
- result = toBytes(a + b);
- sum.put(e.getKey(), new ImmutableBytesWritable(result));
+ public void reduce(ImmutableBytesWritable key, Iterator<RowResult> values,
+ OutputCollector<ImmutableBytesWritable, BatchUpdate> output,
+ Reporter reporter) throws IOException {
+
+ BatchUpdate b = new BatchUpdate(key.get());
+ RowResult r = values.next();
+ for (Map.Entry<byte[], Cell> f : r.entrySet()) {
+ b.put(f.getKey(), f.getValue().getValue());
}
- output.collect(tKey, sum);
+
+ output.collect(key, b);
}
}
public void testMatrixMapReduce() throws IOException {
- a = Matrix.random(conf, SIZE, SIZE);
- b = Matrix.random(conf, SIZE, SIZE);
- c = new Matrix(conf, new Text("matrixC"));
+ a = new Matrix(conf, new Text("MatrixA"));
+ a.set(0, 0, 1);
+ a.set(0, 1, 0);
+ b = new Matrix(conf, new Text("MatrixB"));
+ b.set(0, 0, 1);
+ b.set(0, 1, 1);
+
+ a.close();
+ b.close();
+
miniMRJob();
}
public void miniMRJob() throws IOException {
- JobConf jobConf = new JobConf(conf, TestMatrixMapReduce.class);
- jobConf.setJobName("test MR job");
- jobConf.setInputFormat(MatrixInputFormat.class);
- jobConf.setOutputFormat(MatrixOutputFormat.class);
- AdditionMap.initJob(a.getName(), b.getName(), AdditionMap.class, jobConf);
- AdditionSubtractionReduce.initJob("matrixC",
- AdditionSubtractionReduce.class, jobConf);
-
- jobConf.setNumMapTasks(1);
- jobConf.setNumReduceTasks(1);
-
- JobClient.runJob(jobConf);
-
- assertEquals(c.getRowDimension(), SIZE);
- assertEquals(c.getColumnDimension(), SIZE);
-
- for(int i = 0; i < c.getRowDimension(); i++) {
- for(int j = 0; j < c.getColumnDimension(); j++) {
- assertEquals(c.get(i, j), (a.get(i, j) + b.get(i, j)));
- }
- }
+ Matrix c = new Matrix(conf, new Text("xanadu"));
+ c.close();
+
+ JobConf jobConf = new JobConf(conf, TestMatrixMapReduce.class);
+ jobConf.setJobName("test MR job");
+
+ TableMap.initJob("MatrixA", "column:", AdditionMap.class,
+ ImmutableBytesWritable.class, RowResult.class, jobConf);
+ TableReduce.initJob("xanadu", AdditionReduce.class, jobConf);
+
+ jobConf.setNumMapTasks(1);
+ jobConf.setNumReduceTasks(1);
+
+ JobClient.runJob(jobConf);
+
+ assertEquals(c.get(0, 0), 2.0);
+ assertEquals(c.get(0, 1), 1.0);
}
+
}