Baunsgaard commented on a change in pull request #919:
URL: https://github.com/apache/systemml/pull/919#discussion_r430215341
##########
File path: src/main/java/org/apache/sysds/runtime/instructions/cp/VariableCPInstruction.java
##########
@@ -598,192 +545,116 @@ else if ( getInput1().getDataType() == DataType.SCALAR ){
			break;
		case RemoveVariableAndFile:
-			// Remove the variable from HashMap _variables, and possibly delete the data on disk.
-			boolean del = ( (BooleanObject) ec.getScalarInput(getInput2().getName(), getInput2().getValueType(), true) ).getBooleanValue();
-			MatrixObject m = (MatrixObject) ec.removeVariable(getInput1().getName());
-
-			if ( !del ) {
-				// HDFS file should be retained after clearData(),
-				// therefore data must be exported if dirty flag is set
-				if ( m.isDirty() )
-					m.exportData();
-			}
-			else {
-				//throw new DMLRuntimeException("rmfilevar w/ true is not expected! " + instString);
-				//cleanDataOnHDFS(pb, input1.getName());
-				cleanDataOnHDFS( m );
-			}
-
-			// check if in-memory object can be cleaned up
-			if ( !ec.getVariables().hasReferences(m) ) {
-				// no other variable in the symbol table points to the same Data object as that of input1.getName()
-
-				//remove matrix object from cache
-				m.clearData();
-			}
-
+			processRemoveVariableAndFileInstruction(ec);
			break;
		case CastAsScalarVariable: //castAsScalarVariable
-			if( getInput1().getDataType().isFrame() ) {
-				FrameBlock fBlock = ec.getFrameInput(getInput1().getName());
-				if( fBlock.getNumRows()!=1 || fBlock.getNumColumns()!=1 )
-					throw new DMLRuntimeException("Dimension mismatch - unable to cast frame '"+getInput1().getName()+"' of dimension ("+fBlock.getNumRows()+" x "+fBlock.getNumColumns()+") to scalar.");
-				Object value = fBlock.get(0,0);
-				ec.releaseFrameInput(getInput1().getName());
-				ec.setScalarOutput(output.getName(),
-					ScalarObjectFactory.createScalarObject(fBlock.getSchema()[0], value));
-			}
-			else if( getInput1().getDataType().isMatrix() ) {
-				MatrixBlock mBlock = ec.getMatrixInput(getInput1().getName());
-				if( mBlock.getNumRows()!=1 || mBlock.getNumColumns()!=1 )
-					throw new DMLRuntimeException("Dimension mismatch - unable to cast matrix '"+getInput1().getName()+"' of dimension ("+mBlock.getNumRows()+" x "+mBlock.getNumColumns()+") to scalar.");
-				double value = mBlock.getValue(0,0);
-				ec.releaseMatrixInput(getInput1().getName());
-				ec.setScalarOutput(output.getName(), new DoubleObject(value));
-			}
-			else if( getInput1().getDataType().isTensor() ) {
-				TensorBlock tBlock = ec.getTensorInput(getInput1().getName());
-				if (tBlock.getNumDims() != 2 || tBlock.getNumRows() != 1 || tBlock.getNumColumns() != 1)
-					throw new DMLRuntimeException("Dimension mismatch - unable to cast tensor '" + getInput1().getName() + "' to scalar.");
-				ValueType vt = !tBlock.isBasic() ? tBlock.getSchema()[0] : tBlock.getValueType();
-				ec.setScalarOutput(output.getName(), ScalarObjectFactory
-					.createScalarObject(vt, tBlock.get(new int[] {0, 0})));
-				ec.releaseTensorInput(getInput1().getName());
-			}
-			else if( getInput1().getDataType().isList() ) {
-				//TODO handling of cleanup status, potentially new object
-				ListObject list = (ListObject)ec.getVariable(getInput1().getName());
-				ec.setVariable(output.getName(), list.slice(0));
-			}
-			else {
-				throw new DMLRuntimeException("Unsupported data type "
-					+ "in as.scalar(): "+getInput1().getDataType().name());
-			}
+			processCastAsScalarVariableInstruction(ec);
			break;
-		case CastAsMatrixVariable:{
-			if( getInput1().getDataType().isFrame() ) {
-				FrameBlock fin = ec.getFrameInput(getInput1().getName());
-				MatrixBlock out = DataConverter.convertToMatrixBlock(fin);
-				ec.releaseFrameInput(getInput1().getName());
-				ec.setMatrixOutput(output.getName(), out);
-			}
-			else if( getInput1().getDataType().isScalar() ) {
-				ScalarObject scalarInput = ec.getScalarInput(
-					getInput1().getName(), getInput1().getValueType(), getInput1().isLiteral());
-				MatrixBlock out = new MatrixBlock(scalarInput.getDoubleValue());
-				ec.setMatrixOutput(output.getName(), out);
-			}
-			else if( getInput1().getDataType().isList() ) {
-				//TODO handling of cleanup status, potentially new object
-				ListObject list = (ListObject)ec.getVariable(getInput1().getName());
-				if( list.getLength() > 1 ) {
-					if( !list.checkAllDataTypes(DataType.SCALAR) )
-						throw new DMLRuntimeException("as.matrix over multi-entry list only allows scalars.");
-					MatrixBlock out = new MatrixBlock(list.getLength(), 1, false);
-					for( int i=0; i<list.getLength(); i++ )
-						out.quickSetValue(i, 0, ((ScalarObject)list.slice(i)).getDoubleValue());
-					ec.setMatrixOutput(output.getName(), out);
-				}
-				else {
-					//pass through matrix input or create 1x1 matrix for scalar
-					Data tmp = list.slice(0);
-					if( tmp instanceof ScalarObject && tmp.getValueType()!=ValueType.STRING ) {
-						MatrixBlock out = new MatrixBlock(((ScalarObject)tmp).getDoubleValue());
-						ec.setMatrixOutput(output.getName(), out);
-					}
-					else {
-						ec.setVariable(output.getName(), tmp);
-					}
-				}
-			}
-			else {
-				throw new DMLRuntimeException("Unsupported data type "
-					+ "in as.matrix(): "+getInput1().getDataType().name());
-			}
+
+		case CastAsMatrixVariable:
+			processCastAsMatrixVariableInstruction(ec);
			break;
-		}
-		case CastAsFrameVariable:{
-			FrameBlock out = null;
-			if( getInput1().getDataType()==DataType.SCALAR ) {
-				ScalarObject scalarInput = ec.getScalarInput(getInput1());
-				out = new FrameBlock(1, getInput1().getValueType());
-				out.ensureAllocatedColumns(1);
-				out.set(0, 0, scalarInput.getStringValue());
-			}
-			else { //DataType.FRAME
-				MatrixBlock min = ec.getMatrixInput(getInput1().getName());
-				out = DataConverter.convertToFrameBlock(min);
-				ec.releaseMatrixInput(getInput1().getName());
-			}
-			ec.setFrameOutput(output.getName(), out);
+
+		case CastAsFrameVariable:
+			processCastAsFrameVariableInstruction(ec);
			break;
-		}
-		case CastAsDoubleVariable:{
-			ScalarObject in = ec.getScalarInput(getInput1());
-			ec.setScalarOutput(output.getName(), ScalarObjectFactory.castToDouble(in));
+
+		case CastAsDoubleVariable:
+			ScalarObject scalarDoubleInput = ec.getScalarInput(getInput1());
+			ec.setScalarOutput(output.getName(), ScalarObjectFactory.castToDouble(scalarDoubleInput));
			break;
-		}
-		case CastAsIntegerVariable:{
-			ScalarObject in = ec.getScalarInput(getInput1());
-			ec.setScalarOutput(output.getName(), ScalarObjectFactory.castToLong(in));
+
+		case CastAsIntegerVariable:
+			ScalarObject scalarLongInput = ec.getScalarInput(getInput1());
+			ec.setScalarOutput(output.getName(), ScalarObjectFactory.castToLong(scalarLongInput));
			break;
-		}
-		case CastAsBooleanVariable:{
-			ScalarObject scalarInput = ec.getScalarInput(getInput1());
-			ec.setScalarOutput(output.getName(), new BooleanObject(scalarInput.getBooleanValue()));
+
+		case CastAsBooleanVariable:
+			ScalarObject scalarBooleanInput = ec.getScalarInput(getInput1());
+			ec.setScalarOutput(output.getName(), new BooleanObject(scalarBooleanInput.getBooleanValue()));
			break;
-		}
		case Read:
-			ScalarObject res = null;
-			try {
-				switch(getInput1().getValueType()) {
-					case FP64:
-						res = new DoubleObject(HDFSTool.readDoubleFromHDFSFile(getInput2().getName()));
-						break;
-					case INT64:
-						res = new IntObject(HDFSTool.readIntegerFromHDFSFile(getInput2().getName()));
-						break;
-					case BOOLEAN:
-						res = new BooleanObject(HDFSTool.readBooleanFromHDFSFile(getInput2().getName()));
-						break;
-					case STRING:
-						res = new StringObject(HDFSTool.readStringFromHDFSFile(getInput2().getName()));
-						break;
-					default:
-						throw new DMLRuntimeException("Invalid value type (" + getInput1().getValueType() + ") while processing readScalar instruction.");
-				}
-			} catch ( IOException e ) {
-				throw new DMLRuntimeException(e);
-			}
-			ec.setScalarOutput(getInput1().getName(), res);
-
+			processReadInstruction(ec);
			break;
		case Write:
			processWriteInstruction(ec);
			break;
		case SetFileName:
-			Data data = ec.getVariable(getInput1().getName());
-			if ( data.getDataType() == DataType.MATRIX ) {
-				if ( getInput3().getName().equalsIgnoreCase("remote") ) {
-					((MatrixObject)data).setFileName(getInput2().getName());
-				}
-				else {
-					throw new DMLRuntimeException("Invalid location (" + getInput3().getName() + ") in SetFileName instruction: " + instString);
-				}
-			} else{
-				throw new DMLRuntimeException("Invalid data type (" + getInput1().getDataType() + ") in SetFileName instruction: " + instString);
-			}
+			processSetFileNameInstruction(ec);
			break;
		default:
			throw new DMLRuntimeException("Unknown opcode: " + opcode );
	}
}
+
+	/**
+	 * Handler for processInstruction "CreateVariable" case
+	 * @param ec execution context of the instruction
+	 */
+	private void processCreateVariableInstruction(ExecutionContext ec){
+		//PRE: for robustness we cleanup existing variables, because a setVariable
+		//would cause a buffer pool memory leak as these objects would never be removed
+		if(ec.containsVariable(getInput1()))
+			processRemoveVariableInstruction(ec, getInput1().getName());
+
+		if ( getInput1().getDataType() == DataType.MATRIX ) {
+			String fname = createUniqueFilename();
+			MatrixObject obj = new MatrixObject(getInput1().getValueType(), fname);
+			setCacheableDataFields(obj);
+			obj.setUpdateType(_updateType);
+			obj.setMarkForLinCache(true);
+			ec.setVariable(getInput1().getName(), obj);
+			if(DMLScript.STATISTICS && _updateType.isInPlace())
+				Statistics.incrementTotalUIPVar();
+		}
+		else if( getInput1().getDataType() == DataType.TENSOR ) {
+			String fname = createUniqueFilename();
+			CacheableData<?> obj = new TensorObject(getInput1().getValueType(), fname);
+			setCacheableDataFields(obj);
+			ec.setVariable(getInput1().getName(), obj);
+		}
+		else if( getInput1().getDataType() == DataType.FRAME ) {
+			String fname = createUniqueFilename();
+			FrameObject fobj = new FrameObject(fname);
+			setCacheableDataFields(fobj);
+			if( _schema != null )
+				fobj.setSchema(_schema); //after metadata
+			ec.setVariable(getInput1().getName(), fobj);
+		}
+		else if ( getInput1().getDataType() == DataType.SCALAR ){
+			//created variable not called for scalars
+			ec.setScalarOutput(getInput1().getName(), null);
+		}
+		else {
Review comment:
This could be a switch statement
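   A rough sketch of what that could look like (hypothetical only; the helper names below are made up, and the branch bodies would simply be the existing ones moved over unchanged):

	// hypothetical sketch: dispatch on the DataType enum instead of the if-else chain
	switch( getInput1().getDataType() ) {
		case MATRIX: createMatrixVariable(ec); break;  // assumed helper = current MATRIX branch
		case TENSOR: createTensorVariable(ec); break;  // assumed helper = current TENSOR branch
		case FRAME:  createFrameVariable(ec);  break;  // assumed helper = current FRAME branch
		case SCALAR: ec.setScalarOutput(getInput1().getName(), null); break;
		default:
			throw new DMLRuntimeException("Unexpected data type: " + getInput1().getDataType());
	}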
##########
File path: src/main/java/org/apache/sysds/runtime/instructions/cp/VariableCPInstruction.java
##########
@@ -898,20 +955,38 @@ else if( fmt == FileFormat.CSV )
		else {
			// Default behavior
			MatrixObject mo = ec.getMatrixObject(getInput1().getName());
-			mo.setPrivacyConstraints(getPrivacyConstraint());
			mo.exportData(fname, fmtStr, _formatProperties);
		}
+		// Set privacy constraint of write instruction to the same as that of the input
+		setPrivacyConstraint(ec.getMatrixObject(getInput1().getName()).getPrivacyConstraint());
	}
	else if( getInput1().getDataType() == DataType.FRAME ) {
		FrameObject mo = ec.getFrameObject(getInput1().getName());
		mo.exportData(fname, fmtStr, _formatProperties);
+		setPrivacyConstraint(mo.getPrivacyConstraint());
	}
	else if( getInput1().getDataType() == DataType.TENSOR ) {
Review comment:
   Also, is there any significance to calling setPrivacyConstraint before calling exportData, or not?
##########
File path: src/main/java/org/apache/sysds/runtime/instructions/cp/VariableCPInstruction.java
##########
@@ -843,6 +714,192 @@ else if (object instanceof FrameObject)
				+ ((FrameObject)object).getNumColumns() + "," + ((FrameObject)object).getNumColumns() + "] to " + getInput3().getName());
}
}
+
+	/**
+	 * Handler for RemoveVariableAndFile instruction
+	 *
+	 * @param ec execution context
+	 */
+	private void processRemoveVariableAndFileInstruction(ExecutionContext ec){
+		// Remove the variable from HashMap _variables, and possibly delete the data on disk.
+		boolean del = ( (BooleanObject) ec.getScalarInput(getInput2().getName(), getInput2().getValueType(), true) ).getBooleanValue();
+		MatrixObject m = (MatrixObject) ec.removeVariable(getInput1().getName());
+
+		if ( !del ) {
+			// HDFS file should be retained after clearData(),
+			// therefore data must be exported if dirty flag is set
+			if ( m.isDirty() )
+				m.exportData();
+		}
+		else {
+			//throw new DMLRuntimeException("rmfilevar w/ true is not expected! " + instString);
+			//cleanDataOnHDFS(pb, input1.getName());
+			cleanDataOnHDFS( m );
+		}
+
+		// check if in-memory object can be cleaned up
+		if ( !ec.getVariables().hasReferences(m) ) {
+			// no other variable in the symbol table points to the same Data object as that of input1.getName()
+
+			//remove matrix object from cache
+			m.clearData();
+		}
+	}
+
+	/**
+	 * Process CastAsScalarVariable instruction.
+	 * @param ec execution context
+	 */
+	private void processCastAsScalarVariableInstruction(ExecutionContext ec){
+		//TODO: Create privacy constraints for ScalarObject so that the privacy constraints can be propagated to scalars as well.
+		blockIfInputPrivacyActivated(ec.getVariable(getInput1()));
+
+		if( getInput1().getDataType().isFrame() ) {
+			FrameBlock fBlock = ec.getFrameInput(getInput1().getName());
+			if( fBlock.getNumRows()!=1 || fBlock.getNumColumns()!=1 )
+				throw new DMLRuntimeException("Dimension mismatch - unable to cast frame '"+getInput1().getName()+"' of dimension ("+fBlock.getNumRows()+" x "+fBlock.getNumColumns()+") to scalar.");
+			Object value = fBlock.get(0,0);
+			ec.releaseFrameInput(getInput1().getName());
+			ec.setScalarOutput(output.getName(),
+				ScalarObjectFactory.createScalarObject(fBlock.getSchema()[0], value));
+		}
+		else if( getInput1().getDataType().isMatrix() ) {
+			MatrixBlock mBlock = ec.getMatrixInput(getInput1().getName());
+			if( mBlock.getNumRows()!=1 || mBlock.getNumColumns()!=1 )
+				throw new DMLRuntimeException("Dimension mismatch - unable to cast matrix '"+getInput1().getName()+"' of dimension ("+mBlock.getNumRows()+" x "+mBlock.getNumColumns()+") to scalar.");
+			double value = mBlock.getValue(0,0);
+			ec.releaseMatrixInput(getInput1().getName());
+			ec.setScalarOutput(output.getName(), new DoubleObject(value));
+		}
+		else if( getInput1().getDataType().isTensor() ) {
+			TensorBlock tBlock = ec.getTensorInput(getInput1().getName());
+			if (tBlock.getNumDims() != 2 || tBlock.getNumRows() != 1 || tBlock.getNumColumns() != 1)
+				throw new DMLRuntimeException("Dimension mismatch - unable to cast tensor '" + getInput1().getName() + "' to scalar.");
+			ValueType vt = !tBlock.isBasic() ? tBlock.getSchema()[0] : tBlock.getValueType();
+			ec.setScalarOutput(output.getName(), ScalarObjectFactory
+				.createScalarObject(vt, tBlock.get(new int[] {0, 0})));
+			ec.releaseTensorInput(getInput1().getName());
+		}
+		else if( getInput1().getDataType().isList() ) {
+			//TODO handling of cleanup status, potentially new object
+			ListObject list = (ListObject)ec.getVariable(getInput1().getName());
+			ec.setVariable(output.getName(), list.slice(0));
+		}
+		else {
Review comment:
   Again, this could be a switch statement. And if this is doing the same as the previous if-else comment, why is there a separate method built in to check it?
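   For reference, a minimal sketch of the same idea here (hypothetical; the existing frame/matrix/tensor/list branch bodies would stay exactly as they are):

	// hypothetical sketch: switch over the input DataType in processCastAsScalarVariableInstruction
	switch( getInput1().getDataType() ) {
		case FRAME:  /* existing frame branch */  break;
		case MATRIX: /* existing matrix branch */ break;
		case TENSOR: /* existing tensor branch */ break;
		case LIST:   /* existing list branch */   break;
		default:
			throw new DMLRuntimeException("Unsupported data type in as.scalar(): " + getInput1().getDataType().name());
	}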
##########
File path: src/main/java/org/apache/sysds/runtime/instructions/cp/VariableCPInstruction.java
##########
@@ -843,6 +714,192 @@ else if (object instanceof FrameObject)
				+ ((FrameObject)object).getNumColumns() + "," + ((FrameObject)object).getNumColumns() + "] to " + getInput3().getName());
}
}
+
+	/**
+	 * Handler for RemoveVariableAndFile instruction
+	 *
+	 * @param ec execution context
+	 */
+	private void processRemoveVariableAndFileInstruction(ExecutionContext ec){
+		// Remove the variable from HashMap _variables, and possibly delete the data on disk.
+		boolean del = ( (BooleanObject) ec.getScalarInput(getInput2().getName(), getInput2().getValueType(), true) ).getBooleanValue();
+		MatrixObject m = (MatrixObject) ec.removeVariable(getInput1().getName());
+
+		if ( !del ) {
+			// HDFS file should be retained after clearData(),
+			// therefore data must be exported if dirty flag is set
+			if ( m.isDirty() )
+				m.exportData();
+		}
+		else {
+			//throw new DMLRuntimeException("rmfilevar w/ true is not expected! " + instString);
+			//cleanDataOnHDFS(pb, input1.getName());
+			cleanDataOnHDFS( m );
+		}
+
+		// check if in-memory object can be cleaned up
+		if ( !ec.getVariables().hasReferences(m) ) {
+			// no other variable in the symbol table points to the same Data object as that of input1.getName()
+
+			//remove matrix object from cache
+			m.clearData();
+		}
+	}
+
+	/**
+	 * Process CastAsScalarVariable instruction.
+	 * @param ec execution context
+	 */
+	private void processCastAsScalarVariableInstruction(ExecutionContext ec){
+		//TODO: Create privacy constraints for ScalarObject so that the privacy constraints can be propagated to scalars as well.
+		blockIfInputPrivacyActivated(ec.getVariable(getInput1()));
+
+		if( getInput1().getDataType().isFrame() ) {
+			FrameBlock fBlock = ec.getFrameInput(getInput1().getName());
+			if( fBlock.getNumRows()!=1 || fBlock.getNumColumns()!=1 )
+				throw new DMLRuntimeException("Dimension mismatch - unable to cast frame '"+getInput1().getName()+"' of dimension ("+fBlock.getNumRows()+" x "+fBlock.getNumColumns()+") to scalar.");
+			Object value = fBlock.get(0,0);
+			ec.releaseFrameInput(getInput1().getName());
+			ec.setScalarOutput(output.getName(),
+				ScalarObjectFactory.createScalarObject(fBlock.getSchema()[0], value));
+		}
+		else if( getInput1().getDataType().isMatrix() ) {
+			MatrixBlock mBlock = ec.getMatrixInput(getInput1().getName());
+			if( mBlock.getNumRows()!=1 || mBlock.getNumColumns()!=1 )
+				throw new DMLRuntimeException("Dimension mismatch - unable to cast matrix '"+getInput1().getName()+"' of dimension ("+mBlock.getNumRows()+" x "+mBlock.getNumColumns()+") to scalar.");
+			double value = mBlock.getValue(0,0);
+			ec.releaseMatrixInput(getInput1().getName());
+			ec.setScalarOutput(output.getName(), new DoubleObject(value));
+		}
+		else if( getInput1().getDataType().isTensor() ) {
+			TensorBlock tBlock = ec.getTensorInput(getInput1().getName());
+			if (tBlock.getNumDims() != 2 || tBlock.getNumRows() != 1 || tBlock.getNumColumns() != 1)
+				throw new DMLRuntimeException("Dimension mismatch - unable to cast tensor '" + getInput1().getName() + "' to scalar.");
+			ValueType vt = !tBlock.isBasic() ? tBlock.getSchema()[0] : tBlock.getValueType();
+			ec.setScalarOutput(output.getName(), ScalarObjectFactory
+				.createScalarObject(vt, tBlock.get(new int[] {0, 0})));
+			ec.releaseTensorInput(getInput1().getName());
+		}
+		else if( getInput1().getDataType().isList() ) {
+			//TODO handling of cleanup status, potentially new object
+			ListObject list = (ListObject)ec.getVariable(getInput1().getName());
+			ec.setVariable(output.getName(), list.slice(0));
+		}
+		else {
+			throw new DMLRuntimeException("Unsupported data type "
+				+ "in as.scalar(): "+getInput1().getDataType().name());
+		}
+	}
+
+
+	/**
+	 * Throw DMLPrivacyException if privacy is activated for the input variable
+	 * @param input variable for which the privacy constraint is checked
+	 */
+	private void blockIfInputPrivacyActivated(Data input){
+		if ( input != null && (input instanceof CacheableData<?>)){
+			PrivacyConstraint privacyConstraintIn = ((CacheableData<?>) input).getPrivacyConstraint();
+			if ( privacyConstraintIn != null && (privacyConstraintIn.getPrivacyLevel() == PrivacyLevel.Private) ){
+				throw new DMLPrivacyException("Privacy constraint cannot be propagated to scalar for input " + getInput1().getName());
+			}
+		}
Review comment:
   So PrivacyConstraint is associated with CacheableData in general?
##########
File path: src/main/java/org/apache/sysds/runtime/privacy/PrivacyConstraint.java
##########
@@ -24,19 +24,26 @@
*/
public class PrivacyConstraint
{
- protected boolean _privacy = false;
+
+ public enum PrivacyLevel {
Review comment:
static
##########
File path: src/main/java/org/apache/sysds/runtime/instructions/cp/VariableCPInstruction.java
##########
@@ -956,7 +1031,7 @@ private void writeCSVFile(ExecutionContext ec, String fname) {
			else {
				mo.exportData(fname, outFmt, _formatProperties);
			}
-			HDFSTool.writeMetaDataFile (fname + ".mtd", mo.getValueType(), dc, FileFormat.CSV, _formatProperties);
+			HDFSTool.writeMetaDataFile (fname + ".mtd", mo.getValueType(), dc, FileFormat.CSV, _formatProperties, mo.getPrivacyConstraint());
Review comment:
   You might want to remove the extra space.
##########
File path: src/test/java/org/apache/sysds/test/functions/privacy/FederatedWorkerHandlerTest.java
##########
@@ -0,0 +1,341 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.sysds.test.functions.privacy;
+
+import java.util.Arrays;
+
+import org.apache.sysds.api.DMLException;
+import org.apache.sysds.api.DMLScript;
+import org.apache.sysds.runtime.meta.MatrixCharacteristics;
+import org.apache.sysds.runtime.privacy.PrivacyConstraint;
+import org.apache.sysds.runtime.privacy.PrivacyConstraint.PrivacyLevel;
+import org.apache.sysds.test.AutomatedTestBase;
+import org.apache.sysds.test.TestConfiguration;
+import org.apache.sysds.test.TestUtils;
+import org.junit.Test;
+import org.apache.sysds.common.Types;
+import static java.lang.Thread.sleep;
+
+public class FederatedWorkerHandlerTest extends AutomatedTestBase {
Review comment:
   Indentation needs fixing! Tabs!
##########
File path: src/test/java/org/apache/sysds/test/functions/privacy/MatrixMultiplicationPropagationTest.java
##########
@@ -46,26 +47,36 @@ public void setUp() {
	}
	@Test
-	public void testMatrixMultiplicationPropagation() throws JSONException {
-		matrixMultiplicationPropagation(true, true);
+	public void testMatrixMultiplicationPropagationPrivate() throws JSONException {
Review comment:
   I see the JSONException throwing is here as well.
##########
File path: src/test/java/org/apache/sysds/test/functions/privacy/FederatedL2SVMTest.java
##########
@@ -0,0 +1,386 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.sysds.test.functions.privacy;
+
+import org.junit.Test;
+import org.apache.sysds.api.DMLException;
+import org.apache.sysds.api.DMLScript;
+import org.apache.sysds.common.Types;
+import org.apache.sysds.runtime.meta.MatrixCharacteristics;
+import org.apache.sysds.runtime.privacy.PrivacyConstraint;
+import org.apache.sysds.runtime.privacy.PrivacyConstraint.PrivacyLevel;
+import org.apache.sysds.test.AutomatedTestBase;
+import org.apache.sysds.test.TestConfiguration;
+import org.apache.sysds.test.TestUtils;
+import org.apache.wink.json4j.JSONException;
+
+import java.util.HashMap;
+import java.util.Map;
+
[email protected]
+public class FederatedL2SVMTest extends AutomatedTestBase {
+
+ private final static String TEST_DIR = "functions/federated/";
+ private final static String TEST_NAME = "FederatedL2SVMTest";
+	private final static String TEST_CLASS_DIR = TEST_DIR + FederatedL2SVMTest.class.getSimpleName() + "/";
+
+ private final static int blocksize = 1024;
+ private int rows = 100;
+ private int cols = 10;
+
+ @Override
+ public void setUp() {
+ TestUtils.clearAssertionInformation();
+		addTestConfiguration(TEST_NAME, new TestConfiguration(TEST_CLASS_DIR, TEST_NAME, new String[] {"Z"}));
+ }
+
+ // PrivateAggregation Single Input
+
+ @Test
+ public void federatedL2SVMCPPrivateAggregationX1() throws JSONException
{
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("X1", new
PrivacyConstraint(PrivacyLevel.PrivateAggregation));
+ federatedL2SVMNoException(Types.ExecMode.SINGLE_NODE,
privacyConstraints, null, PrivacyLevel.PrivateAggregation);
+ }
+
+ @Test
+ public void federatedL2SVMCPPrivateAggregationX2() throws JSONException
{
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("X2", new
PrivacyConstraint(PrivacyLevel.PrivateAggregation));
+ federatedL2SVMNoException(Types.ExecMode.SINGLE_NODE,
privacyConstraints, null, PrivacyLevel.PrivateAggregation);
+ }
+
+ @Test
+ public void federatedL2SVMCPPrivateAggregationY() throws JSONException {
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("Y", new
PrivacyConstraint(PrivacyLevel.PrivateAggregation));
+ federatedL2SVMNoException(Types.ExecMode.SINGLE_NODE,
privacyConstraints, null, PrivacyLevel.PrivateAggregation);
+ }
+
+ // Private Single Input
+
+ @Test
+ public void federatedL2SVMCPPrivateFederatedX1() throws JSONException {
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("X1", new
PrivacyConstraint(PrivacyLevel.Private));
+ federatedL2SVM(Types.ExecMode.SINGLE_NODE, privacyConstraints,
null, PrivacyLevel.Private, false, null, true, DMLException.class);
+ }
+
+ @Test
+ public void federatedL2SVMCPPrivateFederatedX2() throws JSONException {
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("X2", new
PrivacyConstraint(PrivacyLevel.Private));
+ federatedL2SVM(Types.ExecMode.SINGLE_NODE, privacyConstraints,
null, PrivacyLevel.Private, false, null, true, DMLException.class);
+ }
+
+ @Test
+ public void federatedL2SVMCPPrivateFederatedY() throws JSONException {
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("Y", new
PrivacyConstraint(PrivacyLevel.Private));
+ federatedL2SVMNoException(Types.ExecMode.SINGLE_NODE,
privacyConstraints, null, PrivacyLevel.Private);
+ }
+
+ // Setting Privacy of Matrix (Throws Exception)
+
+ @Test
+ public void federatedL2SVMCPPrivateMatrixX1() throws JSONException {
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("X1", new
PrivacyConstraint(PrivacyLevel.Private));
+ federatedL2SVM(Types.ExecMode.SINGLE_NODE, null,
privacyConstraints, PrivacyLevel.Private, true, DMLException.class, false,
null);
+ }
+
+ @Test
+ public void federatedL2SVMCPPrivateMatrixX2() throws JSONException {
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("X2", new
PrivacyConstraint(PrivacyLevel.Private));
+ federatedL2SVM(Types.ExecMode.SINGLE_NODE, null,
privacyConstraints, PrivacyLevel.Private, true, DMLException.class, false,
null);
+ }
+
+ @Test
+ public void federatedL2SVMCPPrivateMatrixY() throws JSONException {
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("Y", new
PrivacyConstraint(PrivacyLevel.Private));
+ federatedL2SVM(Types.ExecMode.SINGLE_NODE, null,
privacyConstraints, PrivacyLevel.Private, true, DMLException.class, false,
null);
+ }
+
+ @Test
+ public void federatedL2SVMCPPrivateFederatedAndMatrixX1() throws
JSONException {
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("X1", new
PrivacyConstraint(PrivacyLevel.Private));
+ federatedL2SVM(Types.ExecMode.SINGLE_NODE, privacyConstraints,
privacyConstraints, PrivacyLevel.Private, true, DMLException.class, true,
DMLException.class);
+ }
+
+ @Test
+ public void federatedL2SVMCPPrivateFederatedAndMatrixX2() throws
JSONException {
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("X2", new
PrivacyConstraint(PrivacyLevel.Private));
+ federatedL2SVM(Types.ExecMode.SINGLE_NODE, privacyConstraints,
privacyConstraints, PrivacyLevel.Private, true, DMLException.class, true,
DMLException.class);
+ }
+
+ @Test
+ public void federatedL2SVMCPPrivateFederatedAndMatrixY() throws
JSONException {
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("Y", new
PrivacyConstraint(PrivacyLevel.Private));
+ federatedL2SVM(Types.ExecMode.SINGLE_NODE, privacyConstraints,
privacyConstraints, PrivacyLevel.Private, true, DMLException.class, false,
null);
+ }
+
+ // Privacy Level Private Combinations
+
+ @Test
+ public void federatedL2SVMCPPrivateFederatedX1X2() throws JSONException
{
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("X1", new
PrivacyConstraint(PrivacyLevel.Private));
+ privacyConstraints.put("X2", new
PrivacyConstraint(PrivacyLevel.Private));
+ federatedL2SVM(Types.ExecMode.SINGLE_NODE, privacyConstraints,
null, PrivacyLevel.Private, false, null, true, DMLException.class);
+ }
+
+ @Test
+ public void federatedL2SVMCPPrivateFederatedX1Y() throws JSONException {
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("X1", new
PrivacyConstraint(PrivacyLevel.Private));
+ privacyConstraints.put("Y", new
PrivacyConstraint(PrivacyLevel.Private));
+ federatedL2SVM(Types.ExecMode.SINGLE_NODE, privacyConstraints,
null, PrivacyLevel.Private, false, null, true, DMLException.class);
+ }
+
+ @Test
+ public void federatedL2SVMCPPrivateFederatedX2Y() throws JSONException {
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("X2", new
PrivacyConstraint(PrivacyLevel.Private));
+ privacyConstraints.put("Y", new
PrivacyConstraint(PrivacyLevel.Private));
+ federatedL2SVM(Types.ExecMode.SINGLE_NODE, privacyConstraints,
null, PrivacyLevel.Private, false, null, true, DMLException.class);
+ }
+
+ @Test
+ public void federatedL2SVMCPPrivateFederatedX1X2Y() throws
JSONException {
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("X1", new
PrivacyConstraint(PrivacyLevel.Private));
+ privacyConstraints.put("X2", new
PrivacyConstraint(PrivacyLevel.Private));
+ privacyConstraints.put("Y", new
PrivacyConstraint(PrivacyLevel.Private));
+ federatedL2SVM(Types.ExecMode.SINGLE_NODE, privacyConstraints,
null, PrivacyLevel.Private, false, null, true, DMLException.class);
+ }
+
+ // Privacy Level PrivateAggregation Combinations
+ @Test
+ public void federatedL2SVMCPPrivateAggregationFederatedX1X2() throws
JSONException {
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("X1", new
PrivacyConstraint(PrivacyLevel.PrivateAggregation));
+ privacyConstraints.put("X2", new
PrivacyConstraint(PrivacyLevel.PrivateAggregation));
+ federatedL2SVMNoException(Types.ExecMode.SINGLE_NODE,
privacyConstraints, null, PrivacyLevel.PrivateAggregation);
+ }
+
+ @Test
+ public void federatedL2SVMCPPrivateAggregationFederatedX1Y() throws
JSONException {
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("X1", new
PrivacyConstraint(PrivacyLevel.PrivateAggregation));
+ privacyConstraints.put("Y", new
PrivacyConstraint(PrivacyLevel.PrivateAggregation));
+ federatedL2SVMNoException(Types.ExecMode.SINGLE_NODE,
privacyConstraints, null, PrivacyLevel.PrivateAggregation);
+ }
+
+ @Test
+ public void federatedL2SVMCPPrivateAggregationFederatedX2Y() throws
JSONException {
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("X2", new
PrivacyConstraint(PrivacyLevel.PrivateAggregation));
+ privacyConstraints.put("Y", new
PrivacyConstraint(PrivacyLevel.PrivateAggregation));
+ federatedL2SVMNoException(Types.ExecMode.SINGLE_NODE,
privacyConstraints, null, PrivacyLevel.PrivateAggregation);
+ }
+
+ @Test
+ public void federatedL2SVMCPPrivateAggregationFederatedX1X2Y() throws
JSONException {
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("X1", new
PrivacyConstraint(PrivacyLevel.PrivateAggregation));
+ privacyConstraints.put("X2", new
PrivacyConstraint(PrivacyLevel.PrivateAggregation));
+ privacyConstraints.put("Y", new
PrivacyConstraint(PrivacyLevel.PrivateAggregation));
+ federatedL2SVMNoException(Types.ExecMode.SINGLE_NODE,
privacyConstraints, null, PrivacyLevel.PrivateAggregation);
+ }
+
+ // Privacy Level Combinations
+ @Test
+ public void federatedL2SVMCPPrivatePrivateAggregationFederatedX1X2()
throws JSONException {
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("X1", new
PrivacyConstraint(PrivacyLevel.Private));
+ privacyConstraints.put("X2", new
PrivacyConstraint(PrivacyLevel.PrivateAggregation));
+ federatedL2SVM(Types.ExecMode.SINGLE_NODE, privacyConstraints,
null, PrivacyLevel.Private, false, null, true, DMLException.class);
+ }
+
+ @Test
+ public void federatedL2SVMCPPrivatePrivateAggregationFederatedX1Y()
throws JSONException {
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("X1", new
PrivacyConstraint(PrivacyLevel.Private));
+ privacyConstraints.put("Y", new
PrivacyConstraint(PrivacyLevel.PrivateAggregation));
+ federatedL2SVM(Types.ExecMode.SINGLE_NODE, privacyConstraints,
null, PrivacyLevel.Private, false, null, true, DMLException.class);
+ }
+
+ @Test
+ public void federatedL2SVMCPPrivatePrivateAggregationFederatedX2Y()
throws JSONException {
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("X2", new
PrivacyConstraint(PrivacyLevel.Private));
+ privacyConstraints.put("Y", new
PrivacyConstraint(PrivacyLevel.PrivateAggregation));
+ federatedL2SVM(Types.ExecMode.SINGLE_NODE, privacyConstraints,
null, PrivacyLevel.Private, false, null, true, DMLException.class);
+ }
+
+ @Test
+ public void federatedL2SVMCPPrivatePrivateAggregationFederatedYX1()
throws JSONException {
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("Y", new
PrivacyConstraint(PrivacyLevel.Private));
+ privacyConstraints.put("X1", new
PrivacyConstraint(PrivacyLevel.PrivateAggregation));
+ federatedL2SVMNoException(Types.ExecMode.SINGLE_NODE,
privacyConstraints, null, PrivacyLevel.Private);
+ }
+
+ @Test
+ public void federatedL2SVMCPPrivatePrivateAggregationFederatedYX2()
throws JSONException {
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("Y", new
PrivacyConstraint(PrivacyLevel.Private));
+ privacyConstraints.put("X2", new
PrivacyConstraint(PrivacyLevel.PrivateAggregation));
+ federatedL2SVMNoException(Types.ExecMode.SINGLE_NODE,
privacyConstraints, null, PrivacyLevel.Private);
+ }
+
+ @Test
+ public void federatedL2SVMCPPrivatePrivateAggregationFederatedX2X1()
throws JSONException {
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("X2", new
PrivacyConstraint(PrivacyLevel.Private));
+ privacyConstraints.put("X1", new
PrivacyConstraint(PrivacyLevel.PrivateAggregation));
+ federatedL2SVM(Types.ExecMode.SINGLE_NODE, privacyConstraints,
null, PrivacyLevel.Private, false, null, true, DMLException.class);
+ }
+
+ // Require Federated Workers to return matrix
+
+ @Test
+ public void federatedL2SVMCPPrivateAggregationX1Exception() throws
JSONException {
+ this.rows = 1000;
+ this.cols = 1;
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("X1", new
PrivacyConstraint(PrivacyLevel.PrivateAggregation));
+ federatedL2SVM(Types.ExecMode.SINGLE_NODE, privacyConstraints,
null, PrivacyLevel.PrivateAggregation, false, null, true, DMLException.class);
+ }
+
+ @Test
+ public void federatedL2SVMCPPrivateAggregationX2Exception() throws
JSONException {
+ this.rows = 1000;
+ this.cols = 1;
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("X2", new
PrivacyConstraint(PrivacyLevel.PrivateAggregation));
+ federatedL2SVM(Types.ExecMode.SINGLE_NODE, privacyConstraints,
null, PrivacyLevel.PrivateAggregation, false, null, true, DMLException.class);
+ }
+
+ @Test
+ public void federatedL2SVMCPPrivateX1Exception() throws JSONException {
+ this.rows = 1000;
+ this.cols = 1;
+ Map<String, PrivacyConstraint> privacyConstraints = new
HashMap<>();
+ privacyConstraints.put("X1", new
PrivacyConstraint(PrivacyLevel.Private));
+ federatedL2SVM(Types.ExecMode.SINGLE_NODE, privacyConstraints,
null, PrivacyLevel.Private, false, null, true, DMLException.class);
+ }
+
+ @Test
+ public void federatedL2SVMCPPrivateX2Exception() throws JSONException {
Review comment:
   Every method in this class throws JSONException. Can this be caught instead? I worry that if it is thrown for some reason, we get the odd "fork crashed" error in our tests.
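   As a hypothetical sketch (method name and body copied from the X1 variant above, helper signatures assumed unchanged), each test could catch the checked exception and fail with a readable message instead:

	@Test
	public void federatedL2SVMCPPrivateX2Exception() {
		try {
			this.rows = 1000;
			this.cols = 1;
			Map<String, PrivacyConstraint> privacyConstraints = new HashMap<>();
			privacyConstraints.put("X2", new PrivacyConstraint(PrivacyLevel.Private));
			federatedL2SVM(Types.ExecMode.SINGLE_NODE, privacyConstraints, null,
				PrivacyLevel.Private, false, null, true, DMLException.class);
		}
		catch(JSONException e) {
			// requires: import static org.junit.Assert.fail;
			fail("Unexpected JSONException: " + e.getMessage());
		}
	}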
##########
File path: src/test/java/org/apache/sysds/test/functions/privacy/MatrixRuntimePropagationTest.java
##########
@@ -0,0 +1,127 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.sysds.test.functions.privacy;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import org.apache.sysds.parser.DataExpression;
+import org.apache.sysds.runtime.meta.MatrixCharacteristics;
+import org.apache.sysds.runtime.privacy.PrivacyConstraint;
+import org.apache.sysds.runtime.privacy.PrivacyConstraint.PrivacyLevel;
+import org.apache.sysds.test.AutomatedTestBase;
+import org.apache.sysds.test.TestConfiguration;
+import org.apache.sysds.test.TestUtils;
+import org.apache.wink.json4j.JSONException;
+import org.junit.Test;
+
+public class MatrixRuntimePropagationTest extends AutomatedTestBase {
Review comment:
Indentation!
##########
File path: src/main/java/org/apache/sysds/runtime/instructions/cp/VariableCPInstruction.java
##########
@@ -898,20 +955,38 @@ else if( fmt == FileFormat.CSV )
		else {
			// Default behavior
			MatrixObject mo = ec.getMatrixObject(getInput1().getName());
-			mo.setPrivacyConstraints(getPrivacyConstraint());
			mo.exportData(fname, fmtStr, _formatProperties);
		}
+		// Set privacy constraint of write instruction to the same as that of the input
+		setPrivacyConstraint(ec.getMatrixObject(getInput1().getName()).getPrivacyConstraint());
	}
	else if( getInput1().getDataType() == DataType.FRAME ) {
		FrameObject mo = ec.getFrameObject(getInput1().getName());
		mo.exportData(fname, fmtStr, _formatProperties);
+		setPrivacyConstraint(mo.getPrivacyConstraint());
	}
	else if( getInput1().getDataType() == DataType.TENSOR ) {
Review comment:
   You could make this a switch statement here again.
##########
File path: src/test/java/org/apache/sysds/test/functions/privacy/MatrixRuntimePropagationTest.java
##########
@@ -0,0 +1,127 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.sysds.test.functions.privacy;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import org.apache.sysds.parser.DataExpression;
+import org.apache.sysds.runtime.meta.MatrixCharacteristics;
+import org.apache.sysds.runtime.privacy.PrivacyConstraint;
+import org.apache.sysds.runtime.privacy.PrivacyConstraint.PrivacyLevel;
+import org.apache.sysds.test.AutomatedTestBase;
+import org.apache.sysds.test.TestConfiguration;
+import org.apache.sysds.test.TestUtils;
+import org.apache.wink.json4j.JSONException;
+import org.junit.Test;
+
+public class MatrixRuntimePropagationTest extends AutomatedTestBase {
+
+	private static final String TEST_DIR = "functions/privacy/";
+	private final static String TEST_CLASS_DIR = TEST_DIR + MatrixMultiplicationPropagationTest.class.getSimpleName() + "/";
+	private final int m = 20;
+	private final int n = 20;
+	private final int k = 20;
Review comment:
   Most of the time, k refers to the thread count inside SystemDS. When I started reading this test I assumed it was the same here, but it is a matrix size variable.
##########
File path: src/main/java/org/apache/sysds/runtime/instructions/cp/VariableCPInstruction.java
##########
@@ -956,7 +1031,7 @@ private void writeCSVFile(ExecutionContext ec, String fname) {
			else {
				mo.exportData(fname, outFmt, _formatProperties);
			}
-			HDFSTool.writeMetaDataFile (fname + ".mtd", mo.getValueType(), dc, FileFormat.CSV, _formatProperties);
+			HDFSTool.writeMetaDataFile (fname + ".mtd", mo.getValueType(), dc, FileFormat.CSV, _formatProperties, mo.getPrivacyConstraint());
Review comment:
   Before FileFormat.
##########
File path: src/main/java/org/apache/sysds/runtime/privacy/PrivacyConstraint.java
##########
@@ -24,19 +24,26 @@
*/
public class PrivacyConstraint
{
- protected boolean _privacy = false;
+
+ public enum PrivacyLevel {
Review comment:
searching around our code base leads to ... everyone doing it this way
... hmm, and here i thought i could find many examples of static enums around
in the code. but clearly everyone does it this way. I guess just ignore the
comment :P
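   (For what it is worth, a member enum in Java is implicitly static anyway, so the explicit modifier would be purely cosmetic. A minimal illustration, with the constant list abbreviated to the levels used in the tests:)

	public class PrivacyConstraint {
		// a nested enum is implicitly static, so these two declarations are equivalent:
		public enum PrivacyLevel { Private, PrivateAggregation }
		// public static enum PrivacyLevel { Private, PrivateAggregation }
	}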
##########
File path: src/test/java/org/apache/sysds/test/functions/privacy/FederatedWorkerHandlerTest.java
##########
@@ -0,0 +1,341 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.sysds.test.functions.privacy;
+
+import java.util.Arrays;
+
+import org.apache.sysds.api.DMLException;
+import org.apache.sysds.api.DMLScript;
+import org.apache.sysds.runtime.meta.MatrixCharacteristics;
+import org.apache.sysds.runtime.privacy.PrivacyConstraint;
+import org.apache.sysds.runtime.privacy.PrivacyConstraint.PrivacyLevel;
+import org.apache.sysds.test.AutomatedTestBase;
+import org.apache.sysds.test.TestConfiguration;
+import org.apache.sysds.test.TestUtils;
+import org.junit.Test;
+import org.apache.sysds.common.Types;
+import static java.lang.Thread.sleep;
+
+public class FederatedWorkerHandlerTest extends AutomatedTestBase {
Review comment:
   Tabs? If you want to be extra nice, you could try to use the code formatter.
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]