Revision: 4017
Author: [email protected]
Date: Fri Dec 3 08:47:28 2010
Log: Fixed the kettle test that was broken. It was trying to write to a
repository on a connection that didn't exist, so I gave it one.
http://code.google.com/p/power-architect/source/detail?r=4017
Added:
/trunk/regress/ca/sqlpower/architect/etl/kettle/ArchitectSessionWithFileValidator.java
/trunk/regress/ca/sqlpower/architect/etl/kettle/KettleJobOutputToRepoTest.java
Modified:
/trunk/regress/ca/sqlpower/architect/etl/kettle/CreateKettleJobTest.java
/trunk/src/main/java/ca/sqlpower/architect/etl/kettle/KettleJob.java
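In outline, the new KettleJobOutputToRepoTest (full diff below) exercises the fix like this. The snippet is a condensed paraphrase of its createRepo() and test methods, not extra production code; the session and HSQLDB data-source setup are elided:

    // Save to a repository rather than to files.
    KettleJob kettleJob = new KettleJob(session);
    kettleJob.setSavingToFile(false);
    kettleJob.setJobName("jobName");
    kettleJob.setRepository(hsqldbDataSource);    // a real file-based HSQLDB data source

    // Assemble a repository from the pieces KettleJob can now hand out for testing.
    Object[] parts = kettleJob.createTestRepository();
    Repository repo = new KettleRepositoryWrapper(
            (LogWriter) parts[0], (RepositoryMeta) parts[1], (UserInfo) parts[2]);

    // The actual fix: hand the repository a live JDBC connection instead of relying on
    // the Repository connect method, which loads its own drivers (see the KettleJob change below).
    kettleJob.createStraightConnection(repo);
    repo.createRepositorySchema(null, false, new ArrayList<String>(), false);

    kettleJob.outputToRepository(jobMeta, transformations, repo);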
=======================================
--- /dev/null
+++ /trunk/regress/ca/sqlpower/architect/etl/kettle/ArchitectSessionWithFileValidator.java    Fri Dec 3 08:47:28 2010
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2010, SQL Power Group Inc.
+ *
+ * This file is part of SQL Power Architect.
+ *
+ * SQL Power Architect is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * SQL Power Architect is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package ca.sqlpower.architect.etl.kettle;
+
+import ca.sqlpower.architect.ArchitectSessionContext;
+import ca.sqlpower.architect.TestingArchitectSession;
+import ca.sqlpower.architect.TestingArchitectSessionContext;
+import ca.sqlpower.util.UserPrompter;
+import ca.sqlpower.util.UserPrompter.UserPromptOptions;
+import ca.sqlpower.util.UserPrompter.UserPromptResponse;
+
+/**
+ * This implementation of the {@link TestingArchitectSessionContext} is used to allow the file
+ * validator return type to be specified by the test. This is for testing how the saving of
+ * Kettle jobs reacts if a file is canceled or selected to not be saved.
+ */
+class ArchitectSessionWithFileValidator extends TestingArchitectSession {
+
+ private UserPromptResponse fvr;
+
+    public ArchitectSessionWithFileValidator(ArchitectSessionContext context) {
+ super(context);
+ }
+
+ public void setReponse(UserPromptResponse fvr) {
+ this.fvr = fvr;
+ }
+
+ @Override
+    public UserPrompter createUserPrompter(String question, UserPromptType responseType, UserPromptOptions optionType, UserPromptResponse defaultResponseType,
+ Object defaultResponse, String ... buttonNames) {
+ return new UserPrompter() {
+ public Object getUserSelectedResponse() {
+ if (fvr == UserPromptResponse.OK) {
+ return true;
+ } else {
+ return false;
+ }
+ }
+
+ public UserPromptResponse promptUser(Object... formatArgs) {
+ return fvr;
+ }
+ };
+ }
+}
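For reference, the tests below drive this class roughly as follows (condensed from KettleJobOutputToRepoTest and the reworked CreateKettleJobTest; not additional production code):

    ArchitectSessionWithFileValidator session =
            new ArchitectSessionWithFileValidator(new TestingArchitectSessionContext());
    session.setReponse(UserPromptResponse.OK);     // or NOT_OK / CANCEL, depending on the test
    KettleJob kettleJob = new KettleJob(session);  // every prompt the job raises now gets the canned response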
=======================================
--- /dev/null
+++ /trunk/regress/ca/sqlpower/architect/etl/kettle/KettleJobOutputToRepoTest.java    Fri Dec 3 08:47:28 2010
@@ -0,0 +1,237 @@
+/*
+ * Copyright (c) 2008, SQL Power Group Inc.
+ *
+ * This file is part of Power*Architect.
+ *
+ * Power*Architect is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * Power*Architect is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+package ca.sqlpower.architect.etl.kettle;
+
+import java.io.File;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+
+import junit.framework.TestCase;
+
+import org.pentaho.di.core.NotePadMeta;
+import org.pentaho.di.core.exception.KettleException;
+import org.pentaho.di.core.logging.LogWriter;
+import org.pentaho.di.job.JobMeta;
+import org.pentaho.di.repository.Repository;
+import org.pentaho.di.repository.RepositoryMeta;
+import org.pentaho.di.repository.UserInfo;
+import org.pentaho.di.trans.TransMeta;
+
+import ca.sqlpower.architect.TestingArchitectSessionContext;
+import ca.sqlpower.sql.JDBCDataSource;
+import ca.sqlpower.sql.JDBCDataSourceType;
+import ca.sqlpower.sqlobject.SQLDatabase;
+import ca.sqlpower.util.UserPrompter.UserPromptResponse;
+
+import com.enterprisedt.util.debug.Logger;
+
+public class KettleJobOutputToRepoTest extends TestCase {
+
+ /**
+     * This is a wrapper class for a repository to track what is being done with it.
+ */
+ public class KettleRepositoryWrapper extends Repository{
+
+ int transformationsSaved = 0;
+ int jobsSaved = 0;
+
+        public KettleRepositoryWrapper(LogWriter log, RepositoryMeta repinfo, UserInfo userinfo) {
+ super(log, repinfo, userinfo);
+ }
+
+        /**
+         * Returns the number of transformations that have been written through this wrapper.
+         */
+ public int getNumTransformationsSaved() {
+ return transformationsSaved;
+ }
+
+ public int getNumJobsSaved() {
+ return jobsSaved;
+ }
+
+ @Override
+        public synchronized void insertJob(JobMeta jobMeta) throws KettleException {
+ jobsSaved++;
+ super.insertJob(jobMeta);
+ }
+
+ @Override
+        public synchronized void insertTransformation(TransMeta transMeta) throws KettleException {
+ transformationsSaved++;
+ super.insertTransformation(transMeta);
+ }
+
+ @Override
+ public long getTransformationID(String s, long l) {
+ return getValFromString(s);
+ }
+
+ @Override
+ public long getNextTransformationID() {
+ return 1;
+ }
+
+ @Override
+        public synchronized long getJobID(String name, long id_directory) throws KettleException {
+ return getValFromString(name);
+ }
+
+ @Override
+ public long getNextJobID() {
+ return 1;
+ }
+
+ @Override
+ public long getDatabaseID(String s) {
+ return getValFromString(s);
+ }
+
+ private long getValFromString(String s) {
+ try {
+ return new Integer(s).intValue();
+ } catch (NumberFormatException e) {
+ return 1;
+ }
+ }
+ }
+
+    private static Logger logger = Logger.getLogger(KettleJobOutputToRepoTest.class);
+
+ private SQLDatabase target;
+ private KettleJob kettleJob;
+ private JDBCDataSource ds;
+ private JDBCDataSourceType dsType;
+ private TransMeta transMeta;
+ private JobMeta job;
+ private KettleRepositoryWrapper krw;
+ private ArchitectSessionWithFileValidator session;
+ private List<TransMeta> transList;
+
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp();
+
+        session = new ArchitectSessionWithFileValidator(new TestingArchitectSessionContext());
+
+ }
+
+ public void createRepo(int n) {
+        // We are using a real HSQLDB database to test this functionality because
+ // we need to format the repository and actually write to it.
+ target = new SQLDatabase();
+ target.setName("Target for Testing");
+        ds = session.getContext().getPlDotIni().getDataSource("regression_test");
+ target.setDataSource(ds);
+ ds.setUser("sa");
+ dsType = ds.getParentType();
+        dsType.putProperty(KettleOptions.KETTLE_CONNECTION_TYPE_KEY, "Hypersonic");
+ ds.setUrl("jdbc:hsqldb:file:testKettleOutputToRepository" + n);
+
+
+ transMeta = createTransMeta();
+ job = createJobMeta();
+
+ transList = new ArrayList<TransMeta>();
+ transList.add(transMeta);
+
+
+ kettleJob = new KettleJob(session);
+ kettleJob.setSavingToFile(false);
+ kettleJob.setJobName("jobName");
+
+ kettleJob.setRepository(ds);
+
+ Object ret[] = kettleJob.createTestRepository();
+ krw = new KettleRepositoryWrapper((LogWriter)ret[0],
+ (RepositoryMeta)ret[1],
+ (UserInfo)ret[2]);
+
+        // This adds a whole bunch of tables needed to store the ETL metadata in the
+ // target database.
+ logger.debug("Creating connection and setting up database");
+ try {
+ kettleJob.createStraightConnection(krw);
+            krw.createRepositorySchema(null, false, new ArrayList<String>(), false);
+ } catch (KettleException e) {
+ throw new RuntimeException(e);
+ } catch (SQLException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ public void testOutputToRepositoryOverwrite() throws Exception {
+ createRepo(1);
+ session.setReponse(UserPromptResponse.OK);
+
+ kettleJob.outputToRepository(job, transList, krw);
+ assertEquals(1, krw.getNumTransformationsSaved());
+ assertEquals(1, krw.getNumJobsSaved());
+
+ deleteTestDatabase(1);
+ }
+
+ public void testOutputToRepositoryDontOverwrite() throws Exception {
+ createRepo(2);
+ session.setReponse(UserPromptResponse.NOT_OK);
+
+ kettleJob.outputToRepository(job, transList, krw);
+ assertEquals(0, krw.getNumTransformationsSaved());
+ assertEquals(0, krw.getNumJobsSaved());
+
+ deleteTestDatabase(2);
+ }
+
+ public void testOutputToRepositoryCancel() throws Exception {
+ createRepo(3);
+ session.setReponse(UserPromptResponse.CANCEL);
+
+ kettleJob.outputToRepository(job, transList, krw);
+ assertEquals(0, krw.getNumTransformationsSaved());
+ assertEquals(0, krw.getNumJobsSaved());
+
+ deleteTestDatabase(3);
+ }
+
+ private void deleteTestDatabase(int n) throws SQLException {
+ logger.debug("Deleting test database");
+ File f = new File("testKettleOutputToRepository" + n + ".log");
+ f.delete();
+ f = new File("testKettleOutputToRepository" + n + ".properties");
+ f.delete();
+ f = new File("testKettleOutputToRepository" + n + ".lck");
+ f.delete();
+ }
+
+ private JobMeta createJobMeta() {
+ LogWriter lw = LogWriter.getInstance();
+ JobMeta job = new JobMeta(lw);
+ job.setName("jobName");
+        job.addNote(new NotePadMeta("original job note", 0, 150, 125, 125));
+ return job;
+ }
+
+ private TransMeta createTransMeta() {
+ TransMeta transMeta = new TransMeta();
+ transMeta.setName("tableName");
+        transMeta.addNote(new NotePadMeta("original trans meta note", 0, 150, 125, 125));
+ return transMeta;
+ }
+}
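A side note on deleteTestDatabase(int): the files it removes are simply the on-disk artifacts HSQLDB creates for a "jdbc:hsqldb:file:" URL. A minimal, hypothetical illustration (the class name and the "testExample" database are invented for this sketch; it assumes the HSQLDB driver is on the classpath, as it is for the regression suite):

    import java.sql.Connection;
    import java.sql.DriverManager;

    public class HsqldbFileArtifactsDemo {
        public static void main(String[] args) throws Exception {
            Class.forName("org.hsqldb.jdbcDriver");
            // Opening a file: URL typically materializes testExample.properties and
            // testExample.log, plus a testExample.lck lock file while the database is open.
            Connection c = DriverManager.getConnection("jdbc:hsqldb:file:testExample", "sa", "");
            c.createStatement().execute("SHUTDOWN"); // a clean shutdown releases the .lck file
            c.close();
        }
    }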
=======================================
--- /trunk/regress/ca/sqlpower/architect/etl/kettle/CreateKettleJobTest.java    Wed Jul 14 14:56:34 2010
+++ /trunk/regress/ca/sqlpower/architect/etl/kettle/CreateKettleJobTest.java    Fri Dec 3 08:47:28 2010
@@ -26,10 +26,8 @@
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
-import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
-import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@@ -37,20 +35,15 @@
import junit.framework.TestCase;
import org.pentaho.di.core.NotePadMeta;
-import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.database.DatabaseMeta;
-import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.LogWriter;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.repository.Repository;
-import org.pentaho.di.repository.RepositoryDirectory;
-import org.pentaho.di.repository.RepositoryMeta;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.StepMeta;
import ca.sqlpower.architect.ArchitectSession;
-import ca.sqlpower.architect.ArchitectSessionContext;
import ca.sqlpower.architect.TestingArchitectSession;
import ca.sqlpower.architect.TestingArchitectSessionContext;
import ca.sqlpower.sql.JDBCDataSource;
@@ -60,46 +53,10 @@
import ca.sqlpower.sqlobject.SQLDatabase;
import ca.sqlpower.sqlobject.SQLObjectException;
import ca.sqlpower.sqlobject.SQLTable;
-import ca.sqlpower.util.UserPrompter;
-import ca.sqlpower.util.UserPrompter.UserPromptOptions;
import ca.sqlpower.util.UserPrompter.UserPromptResponse;
public class CreateKettleJobTest extends TestCase {
- /**
-     * This implementation of the {@link TestingArchitectSessionContext} is used to allow the file
-     * validator return type to be specified by the test. This is for testing how the saving of
-     * Kettle jobs reacts if a file is canceled or selected to not be saved.
- */
-    private class ArchitectSessionContextWithFileValidator extends TestingArchitectSession {
-
- private final UserPromptResponse fvr;
-
-        public ArchitectSessionContextWithFileValidator(ArchitectSessionContext context, UserPromptResponse fvr) {
- super(context);
- this.fvr = fvr;
- }
-
- @Override
-        public UserPrompter createUserPrompter(String question, UserPromptType responseType, UserPromptOptions optionType, UserPromptResponse defaultResponseType,
- Object defaultResponse, String ... buttonNames) {
- return new UserPrompter() {
-
- public Object getUserSelectedResponse() {
- if (fvr == UserPromptResponse.OK) {
- return true;
- } else {
- return false;
- }
- }
-
-            public UserPromptResponse promptUser(Object... formatArgs) {
- return fvr;
- }
- };
- }
- }
-
private SQLDatabase target;
private SQLTable targetTableNoSource;
private SQLTable targetTableMixedSource;
@@ -340,49 +297,6 @@
assertEquals(architectDS.getPass(), dbMeta.getPassword());
assertEquals(architectDS.getUser(), dbMeta.getUsername());
}
-
- public void testOutputToRepositoryOverwrite() throws Exception {
- testOutputToRepository(UserPromptResponse.OK);
- }
-
- public void testOutputToRepositoryDontOverwrite() throws Exception {
- testOutputToRepository(UserPromptResponse.NOT_OK);
- }
-
- public void testOutputToRepositoryCancel() throws Exception {
- testOutputToRepository(UserPromptResponse.CANCEL);
- }
-
-    private void testOutputToRepository(final UserPromptResponse fvr) throws Exception {
- TransMeta transMeta = createTransMeta();
- JobMeta job = createJobMeta();
-
- List<TransMeta> transList = new ArrayList<TransMeta>();
- transList.add(transMeta);
-
-        KettleJob kettleJob = new KettleJob(new ArchitectSessionContextWithFileValidator(new TestingArchitectSessionContext(), fvr),
- new KettleRepositoryDirectoryChooser(){
- public RepositoryDirectory selectDirectory(Repository repo) {
- return new RepositoryDirectory();
- }
- });
- kettleJob.setRepository(new ArchitectDataSourceStub());
- kettleJob.setSavingToFile(false);
- kettleJob.setJobName("jobName");
-        KettleRepositoryStub rep = new KettleRepositoryStub(new RepositoryMeta("", "", null));
- kettleJob.outputToRepository(job, transList, rep);
-
-        if (fvr == UserPromptResponse.NOT_OK || fvr == UserPromptResponse.CANCEL) {
- assertEquals(0, rep.getNumTransformationsSaved());
- assertEquals(0, rep.getNumJobsSaved());
- } else if (fvr == UserPromptResponse.OK) {
- assertEquals(1, rep.getNumTransformationsSaved());
- assertEquals(1, rep.getNumJobsSaved());
- } else {
- fail("Unknown user prompt response: " + fvr);
- }
- assertTrue(rep.getRepositoryDisconnected());
- }
/**
     * This method tests the outputToXML method based on different settings.
@@ -415,8 +329,12 @@
newJob.setName("jobName");
newJob.addNote(new NotePadMeta("new job note", 0, 150, 125, 125));
-        KettleJob kettleJob = new KettleJob(new ArchitectSessionContextWithFileValidator(new TestingArchitectSessionContext(), fvr),
- new RootRepositoryDirectoryChooser());
+
+ ArchitectSessionWithFileValidator as =
+            new ArchitectSessionWithFileValidator(new TestingArchitectSessionContext());
+ as.setReponse(fvr);
+
+        KettleJob kettleJob = new KettleJob(as, new RootRepositoryDirectoryChooser());
kettleJob.setFilePath(jobOutputFile.getPath());
kettleJob.setSavingToFile(true);
kettleJob.setJobName("jobName");
@@ -474,200 +392,4 @@
        return new File(outputFile.getParentFile().getPath() + File.separator + "transformation_for_table_" + name + ".ktr");
}
-
- /**
-     * This is a stub of the Repository class from Kettle. This was made to have a
-     * stub repository to give to the outputToRepository method but there are too
-     * many methods and not all of them are stubbed. This means when you create a
-     * transformation to output to the repository you may not be able to give a
- * complex transformation. Jobs should be fine to make complex.
- */
- private class KettleRepositoryStub extends Repository {
-
- KettleDatabaseStub db;
- boolean repositoryDisconnected = false;
- int transformationsSaved = 0;
- int jobsSaved = 0;
-
- public KettleRepositoryStub(RepositoryMeta repMeta) {
- super(null, repMeta, null);
- db = new KettleDatabaseStub();
- }
-
- public KettleDatabaseStub getDatabase() {
- return db;
- }
-
- /**
- * This method does not actually disconnect from the connection!
- */
- public void disconnect() {
- repositoryDisconnected = true;
- }
-
- public boolean getRepositoryDisconnected() {
- return repositoryDisconnected;
- }
-
- public int getNumTransformationsSaved() {
- return transformationsSaved;
- }
-
- public int getNumJobsSaved() {
- return jobsSaved;
- }
-
- public void refreshRepositoryDirectoryTree() {
- }
-
- public long getTransformationID(String s, long l) {
- return getValFromString(s);
- }
-
- public long getNextTransformationID() {
- return 1;
- }
-
- public synchronized void delAllFromTrans(long id) {
- }
-
- public void lockRepository() {
- }
-
- public synchronized long insertLogEntry(String s) {
- return 0;
- }
-
- public long getJobID(String s, RepositoryDirectory d) {
- return getValFromString(s);
- }
-
- public long getNextJobID() {
- return 1;
- }
-
- public synchronized void delAllFromJob(long id) {
- }
-
- public void insertJobNote(long l1, long l2) {
- }
-
-        public synchronized void insertJob(long id_job, long id_directory, String name, long id_database_log, String table_name_log,
-                String modified_user, Date modified_date, boolean useBatchId, boolean batchIdPassed, boolean logfieldUsed,
-                String sharedObjectsFile, String description, String extended_description, String version, int status,
-                String created_user, Date created_date) throws KettleException {
- jobsSaved++;
- }
-
-        public synchronized long insertJobEntryCopy(long id_job, long id_jobentry, long id_jobentry_type, int nr, long gui_location_x,
-                long gui_location_y, boolean gui_draw, boolean parallel) throws KettleException {
- return 1;
- }
-
-        public synchronized long insertJobHop(long id_job, long id_jobentry_copy_from, long id_jobentry_copy_to, boolean enabled,
-                boolean evaluation, boolean unconditional) throws KettleException {
- return 1;
- }
-
-        public synchronized long insertNote(String note, long gui_location_x, long gui_location_y, long gui_location_width,
- long gui_location_height) throws KettleException {
- return 1;
- }
-
-        public synchronized long insertDatabase(String name, String type, String access, String host, String dbname, String port,
-                String user, String pass, String servername, String data_tablespace, String index_tablespace)
- throws KettleDatabaseException {
- return 1;
- }
-
-        public synchronized void updateDatabase(long id_database, String name, String type, String access, String host, String dbname,
-                String port, String user, String pass, String servername, String data_tablespace, String index_tablespace)
- throws KettleDatabaseException {
- }
-
-        public synchronized void delDatabaseAttributes(long id_database) throws KettleDatabaseException {
- }
-
-        public synchronized long insertDatabaseAttribute(long id_database, String code, String value_str) throws KettleDatabaseException {
- return 1;
- }
-
-        public synchronized void updateJobEntryTypes() throws KettleException {
- }
-
- public long getJobEntryTypeID(String s) {
- return getValFromString(s);
- }
-
- public void commit() {
- }
-
- public void rollback() {
- }
-
- public void unlockRepository() {
- }
-
- public void clearNextIDCounters() {
- }
-
- public synchronized void updateStepTypes() throws KettleException {
- }
-
-        public synchronized void closeStepAttributeInsertPreparedStatement() throws KettleDatabaseException {
- }
-
- public long getDatabaseID(String s) {
- return getValFromString(s);
- }
-
-        public synchronized void insertTransformation(TransMeta transMeta) throws KettleDatabaseException {
- transformationsSaved++;
- }
-
-        public synchronized void closeTransAttributeInsertPreparedStatement() throws KettleDatabaseException {
- }
-
-        public synchronized void insertTransNote(long id_transformation, long id_note) throws KettleException {
- }
-
-        public synchronized long getJobID(String name, long id_directory) throws KettleException {
- return 1;
- }
-
- private long getValFromString(String s) {
- try {
- return new Integer(s).intValue();
- } catch (NumberFormatException e) {
- return 1;
- }
- }
-
- }
-
- private class KettleDatabaseStub extends Database {
-
- public KettleDatabaseStub() {
- super(null);
- }
-
- public void setConnection(Connection conn) {
- }
- }
-
- /**
- * This is a data source that will always return null when it
- * tries to create a connection. Otherwise it is fully functional.
- */
- private class ArchitectDataSourceStub extends JDBCDataSource {
-
- public ArchitectDataSourceStub() {
- super(new PlDotIni());
- }
-
- public Connection createConnection() {
- return null;
- }
- }
-
-}
+}
=======================================
--- /trunk/src/main/java/ca/sqlpower/architect/etl/kettle/KettleJob.java    Tue May 25 13:03:30 2010
+++ /trunk/src/main/java/ca/sqlpower/architect/etl/kettle/KettleJob.java    Fri Dec 3 08:47:28 2010
@@ -456,6 +456,15 @@
logger.debug("Parent file path is " + parentPath);
        return new File(parentPath, "transformation_for_table_" + transName + ".ktr").getPath();
}
+
+ /**
+     * Passes the repository a connection directly, because the Repository
+     * connect method loads its own drivers and we don't want to
+     * include them.
+ */
+    public void createStraightConnection(Repository repo) throws KettleException, SQLException {
+        repo.getDatabase().setConnection(settings.getRepository().createConnection());
+ }
/**
     * This method translates the list of SQLTables to a Kettle Job and Transformations and saves
@@ -465,10 +474,7 @@
    void outputToRepository(JobMeta jm, List<TransMeta> transformations, Repository repo) throws KettleException, SQLException {
try {
- // Pass the repository a connection straight as the Repository
- // connect method loads its own drivers and we don't want to
- // include them.
-            repo.getDatabase().setConnection(settings.getRepository().createConnection());
+ createStraightConnection(repo);
RepositoryDirectory directory;
@@ -588,6 +594,28 @@
Repository repo = new Repository(lw, repoMeta, userInfo);
return repo;
+ }
+
+ /**
+     * This method returns the necessary data to create a repository, but doesn't actually
+     * create it, so that a testing repository can be created from the same data.
+ */
+ public Object[] createTestRepository() {
+
+        DatabaseMeta kettleDBMeta = KettleUtils.createDatabaseMeta(settings.getRepository());
+ RepositoryMeta repoMeta = new RepositoryMeta("", "", kettleDBMeta);
+
+        UserInfo userInfo = new UserInfo(settings.getRepository().get(KettleOptions.KETTLE_REPOS_LOGIN_KEY),
+                settings.getRepository().get(KettleOptions.KETTLE_REPOS_PASSWORD_KEY),
+ settings.getJobName(), "", true, null);
+        LogWriter lw = LogWriter.getInstance(); // Repository constructor needs this for some reason
+
+ Object[] ret = new Object[3];
+ ret[0] = (Object)lw;
+ ret[1] = repoMeta;
+ ret[2] = userInfo;
+ return ret;
+
}
/**