jeongyooneo closed pull request #36: [NEMO-94] Clean up legacy TODOs
URL: https://github.com/apache/incubator-nemo/pull/36
This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:
As this is a foreign pull request (from a fork), the diff is supplied
below (as it won't show otherwise due to GitHub magic):
diff --git a/common/src/main/java/edu/snu/nemo/common/StateMachine.java
b/common/src/main/java/edu/snu/nemo/common/StateMachine.java
index 5d6c529f..f98c622d 100644
--- a/common/src/main/java/edu/snu/nemo/common/StateMachine.java
+++ b/common/src/main/java/edu/snu/nemo/common/StateMachine.java
@@ -16,16 +16,15 @@
package edu.snu.nemo.common;
import edu.snu.nemo.common.exception.IllegalStateTransitionException;
-
-import java.util.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.util.*;
+
/**
* A finite state machine that can be created with user defined states and
transitions.
*/
-// TODO #171: Enable StateMachine with Handlers for State Transitions
-// TODO #306: StateMachine Visualization
+// TODO #17: StateMachine Visualization
public final class StateMachine {
private static final Logger LOG =
LoggerFactory.getLogger(StateMachine.class.getName());
diff --git
a/common/src/main/java/edu/snu/nemo/common/ir/executionproperty/ExecutionProperty.java
b/common/src/main/java/edu/snu/nemo/common/ir/executionproperty/ExecutionProperty.java
index 9c7112f2..45249ae4 100644
---
a/common/src/main/java/edu/snu/nemo/common/ir/executionproperty/ExecutionProperty.java
+++
b/common/src/main/java/edu/snu/nemo/common/ir/executionproperty/ExecutionProperty.java
@@ -83,7 +83,7 @@ public final int hashCode() {
*/
public enum Key {
// Applies to IREdge
- DataCommunicationPattern, // TODO #492: modularizing runtime components
for data communication pattern.
+ DataCommunicationPattern,
DataFlowModel,
DataStore,
MetricCollection,
diff --git
a/compiler/frontend/beam/src/main/java/edu/snu/nemo/compiler/frontend/beam/transform/WindowTransform.java
b/compiler/frontend/beam/src/main/java/edu/snu/nemo/compiler/frontend/beam/transform/WindowTransform.java
index 38607573..f686eefc 100644
---
a/compiler/frontend/beam/src/main/java/edu/snu/nemo/compiler/frontend/beam/transform/WindowTransform.java
+++
b/compiler/frontend/beam/src/main/java/edu/snu/nemo/compiler/frontend/beam/transform/WindowTransform.java
@@ -44,7 +44,7 @@ public void prepare(final Context context, final
OutputCollector<T> oc) {
@Override
public void onData(final T element) {
- // TODO #36: Actually assign windows
+ // TODO #1: Support Beam Streaming in Compiler.
outputCollector.emit(element);
}
diff --git
a/compiler/frontend/spark/src/main/java/edu/snu/nemo/compiler/frontend/spark/core/SparkFrontendUtils.java
b/compiler/frontend/spark/src/main/java/edu/snu/nemo/compiler/frontend/spark/core/SparkFrontendUtils.java
index 8ad00d31..6c4c3955 100644
---
a/compiler/frontend/spark/src/main/java/edu/snu/nemo/compiler/frontend/spark/core/SparkFrontendUtils.java
+++
b/compiler/frontend/spark/src/main/java/edu/snu/nemo/compiler/frontend/spark/core/SparkFrontendUtils.java
@@ -92,7 +92,7 @@ public static Serializer deriveSerializerFrom(final
org.apache.spark.SparkContex
final DAGBuilder<IRVertex, IREdge> builder = new DAGBuilder<>(dag);
// save result in a temporary file
- // TODO #740: remove this part, and make it properly transfer with
executor.
+ // TODO #16: Implement collection of data from executor to client
final String resultFile = System.getProperty("user.dir") +
"/collectresult";
final IRVertex collectVertex = new OperatorVertex(new
CollectTransform<>(resultFile));
@@ -108,12 +108,12 @@ public static Serializer deriveSerializerFrom(final
org.apache.spark.SparkContex
JobLauncher.launchDAG(builder.build());
// Retrieve result data from file.
- // TODO #740: remove this part, and make it properly transfer with
executor.
+ // TODO #16: Implement collection of data from executor to client
try {
final List<T> result = new ArrayList<>();
Integer i = 0;
- // TODO #740: remove this part, and make it properly transfer with
executor.
+ // TODO #16: Implement collection of data from executor to client
File file = new File(resultFile + i);
while (file.exists()) {
try (
diff --git
a/compiler/frontend/spark/src/main/java/edu/snu/nemo/compiler/frontend/spark/transform/CollectTransform.java
b/compiler/frontend/spark/src/main/java/edu/snu/nemo/compiler/frontend/spark/transform/CollectTransform.java
index 6cdd7ecd..c1df3ba6 100644
---
a/compiler/frontend/spark/src/main/java/edu/snu/nemo/compiler/frontend/spark/transform/CollectTransform.java
+++
b/compiler/frontend/spark/src/main/java/edu/snu/nemo/compiler/frontend/spark/transform/CollectTransform.java
@@ -50,7 +50,7 @@ public void prepare(final Context context, final
OutputCollector<T> oc) {
@Override
public void onData(final T element) {
// Write result to a temporary file.
- // TODO #740: remove this part, and make it properly transfer with
executor.
+ // TODO #16: Implement collection of data from executor to client
list.add(element);
}
diff --git
a/compiler/frontend/spark/src/main/java/edu/snu/nemo/compiler/frontend/spark/transform/ReduceTransform.java
b/compiler/frontend/spark/src/main/java/edu/snu/nemo/compiler/frontend/spark/transform/ReduceTransform.java
index 853d8e0b..c7dace1c 100644
---
a/compiler/frontend/spark/src/main/java/edu/snu/nemo/compiler/frontend/spark/transform/ReduceTransform.java
+++
b/compiler/frontend/spark/src/main/java/edu/snu/nemo/compiler/frontend/spark/transform/ReduceTransform.java
@@ -99,7 +99,7 @@ public void onData(final T element) {
@Override
public void close() {
// Write result to a temporary file.
- // TODO #711: remove this part, and make it properly write to sink.
+ // TODO #16: Implement collection of data from executor to client.
try {
final Kryo kryo = new Kryo();
final Output output = new Output(new FileOutputStream(filename));
diff --git
a/compiler/optimizer/src/main/java/edu/snu/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopOptimizations.java
b/compiler/optimizer/src/main/java/edu/snu/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopOptimizations.java
index 380a3010..79f605c2 100644
---
a/compiler/optimizer/src/main/java/edu/snu/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopOptimizations.java
+++
b/compiler/optimizer/src/main/java/edu/snu/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopOptimizations.java
@@ -231,7 +231,7 @@ private LoopVertex mergeLoopVertices(final Set<LoopVertex>
loopVertices) {
*/
private Boolean checkEqualityOfIntPredicates(final IntPredicate
predicate1, final IntPredicate predicate2,
final Integer
numberToTestUntil) {
- // TODO #223: strengthen this bit of code where terminationCondition has
to be checked for convergence.
+ // TODO #11: Generalize Equality of Int Predicates for Loops.
if (numberToTestUntil.equals(0)) {
return predicate1.test(numberToTestUntil) ==
predicate2.test(numberToTestUntil);
} else if (predicate1.test(numberToTestUntil) !=
predicate2.test(numberToTestUntil)) {
diff --git
a/examples/spark/src/test/java/edu/snu/nemo/examples/spark/SparkJavaITCase.java
b/examples/spark/src/test/java/edu/snu/nemo/examples/spark/SparkJavaITCase.java
index 15d2dcaa..8aa1d48a 100644
---
a/examples/spark/src/test/java/edu/snu/nemo/examples/spark/SparkJavaITCase.java
+++
b/examples/spark/src/test/java/edu/snu/nemo/examples/spark/SparkJavaITCase.java
@@ -138,7 +138,7 @@ public void testSparkSQLExample() throws Exception {
final String inputFileJson = fileBasePath + peopleJson;
final String inputFileTxt = fileBasePath + peopleTxt;
- // TODO#412: Enable this after implementation of RDDs.
+ // TODO#12: Frontend support for Scala Spark.
// JobLauncher.main(builder
// .addJobId(JavaSparkSQLExample.class.getSimpleName() + "_test")
// .addUserMain(JavaSparkSQLExample.class.getCanonicalName())
diff --git a/pom.xml b/pom.xml
index 54a57f62..95939f09 100644
--- a/pom.xml
+++ b/pom.xml
@@ -35,7 +35,7 @@ limitations under the License.
<reef.version>0.17.0-SNAPSHOT</reef.version>
<protobuf.version>2.5.0</protobuf.version>
<hadoop.version>2.7.2</hadoop.version>
- <log4j.configuration>file://log4j.properties</log4j.configuration>
<!-- TODO: issue#645 -->
+ <log4j.configuration>file://log4j.properties</log4j.configuration>
<netty.version>4.1.16.Final</netty.version>
<guava.version>19.0</guava.version>
<grpc.version>1.7.0</grpc.version>
diff --git
a/runtime/common/src/main/java/edu/snu/nemo/runtime/common/message/local/LocalMessageContext.java
b/runtime/common/src/main/java/edu/snu/nemo/runtime/common/message/local/LocalMessageContext.java
index 8f4f0217..64ce6c9d 100644
---
a/runtime/common/src/main/java/edu/snu/nemo/runtime/common/message/local/LocalMessageContext.java
+++
b/runtime/common/src/main/java/edu/snu/nemo/runtime/common/message/local/LocalMessageContext.java
@@ -28,8 +28,8 @@
private Object replyMessage;
/**
- * TODO #119.
- * @param senderId TODO #119.
+ * TODO #10: Handle Method Javadocs Requirements for Checkstyle Warnings.
+ * @param senderId TODO #10: Handle Method Javadocs Requirements for
Checkstyle Warnings.
*/
LocalMessageContext(final String senderId) {
this.senderId = senderId;
@@ -45,8 +45,8 @@ public String getSenderId() {
}
/**
- * TODO #119.
- * @return TODO #119.
+ * TODO #10: Handle Method Javadocs Requirements for Checkstyle Warnings.
+ * @return TODO #10: Handle Method Javadocs Requirements for Checkstyle
Warnings.
*/
public Optional<Object> getReplyMessage() {
return Optional.ofNullable(replyMessage);
diff --git
a/runtime/common/src/main/java/edu/snu/nemo/runtime/common/message/ncs/NcsMessageEnvironment.java
b/runtime/common/src/main/java/edu/snu/nemo/runtime/common/message/ncs/NcsMessageEnvironment.java
index 296a19e5..50211b32 100644
---
a/runtime/common/src/main/java/edu/snu/nemo/runtime/common/message/ncs/NcsMessageEnvironment.java
+++
b/runtime/common/src/main/java/edu/snu/nemo/runtime/common/message/ncs/NcsMessageEnvironment.java
@@ -182,7 +182,6 @@ public void onException(final Throwable throwable,
*
* Not sure these variable names are conventionally used in RPC frameworks...
* Let's revisit them when we work on
- * TODO #206: Rethink/Refactor NCS as our RPC stack
*/
enum MessageType {
Send,
diff --git a/runtime/common/src/main/proto/ControlMessage.proto
b/runtime/common/src/main/proto/ControlMessage.proto
index 343b8e54..cc883dc9 100644
--- a/runtime/common/src/main/proto/ControlMessage.proto
+++ b/runtime/common/src/main/proto/ControlMessage.proto
@@ -81,7 +81,7 @@ message BlockStateChangedMsg {
}
message DataSizeMetricMsg {
- // TODO #511: Refactor metric aggregation for (general) run-rime
optimization.
+ // TODO #96: Modularize DataSkewPolicy to use MetricVertex and
BarrierVertex.
repeated PartitionSizeEntry partitionSize = 1;
required string blockId = 2;
required string srcIRVertexId = 3;
diff --git
a/runtime/executor/src/main/java/edu/snu/nemo/runtime/executor/datatransfer/InputReader.java
b/runtime/executor/src/main/java/edu/snu/nemo/runtime/executor/datatransfer/InputReader.java
index c275e04c..91298269 100644
---
a/runtime/executor/src/main/java/edu/snu/nemo/runtime/executor/datatransfer/InputReader.java
+++
b/runtime/executor/src/main/java/edu/snu/nemo/runtime/executor/datatransfer/InputReader.java
@@ -40,7 +40,6 @@
/**
* Represents the input data transfer to a task.
- * TODO #492: Modularize the data communication pattern.
*/
public final class InputReader extends DataTransfer {
private final int dstTaskIndex;
diff --git
a/runtime/master/src/main/java/edu/snu/nemo/runtime/master/RuntimeMaster.java
b/runtime/master/src/main/java/edu/snu/nemo/runtime/master/RuntimeMaster.java
index f3d75fc1..a424eccb 100644
---
a/runtime/master/src/main/java/edu/snu/nemo/runtime/master/RuntimeMaster.java
+++
b/runtime/master/src/main/java/edu/snu/nemo/runtime/master/RuntimeMaster.java
@@ -291,7 +291,7 @@ private void handleControlMessage(final
ControlMessage.Message message) {
throw new RuntimeException(exception);
case DataSizeMetric:
final ControlMessage.DataSizeMetricMsg dataSizeMetricMsg =
message.getDataSizeMetricMsg();
- // TODO #511: Refactor metric aggregation for (general) run-rime
optimization.
+ // TODO #96: Modularize DataSkewPolicy to use MetricVertex and
BarrierVertex.
accumulateBarrierMetric(dataSizeMetricMsg.getPartitionSizeList(),
dataSizeMetricMsg.getSrcIRVertexId(),
dataSizeMetricMsg.getBlockId());
break;
@@ -309,8 +309,8 @@ private void handleControlMessage(final
ControlMessage.Message message) {
/**
* Accumulates the metric data for a barrier vertex.
- * TODO #511: Refactor metric aggregation for (general) run-rime
optimization.
- * TODO #513: Replace MetricCollectionBarrierVertex with a Customizable
IRVertex.
+ * TODO #96: Modularize DataSkewPolicy to use MetricVertex and BarrierVertex.
+ * TODO #98: Implement MetricVertex that collect metric used for dynamic
optimization.
*
* @param partitionSizeInfo the size of partitions in a block to accumulate.
* @param srcVertexId the ID of the source vertex.
@@ -344,7 +344,6 @@ private void accumulateBarrierMetric(final
List<ControlMessage.PartitionSizeEntr
}
}
- // TODO #164: Cleanup Protobuf Usage
private static TaskState.State convertTaskState(final
ControlMessage.TaskStateFromExecutor state) {
switch (state) {
case READY:
@@ -379,9 +378,8 @@ private void accumulateBarrierMetric(final
List<ControlMessage.PartitionSizeEntr
/**
* Schedules a periodic DAG logging thread.
- * TODO #58: Web UI (Real-time visualization)
* @param jobStateManager for the job the DAG should be logged.
- *
+ * TODO #20: RESTful APIs to Access Job State and Metric.
* @return the scheduled executor service.
*/
private ScheduledExecutorService scheduleDagLogging(final JobStateManager
jobStateManager) {
diff --git
a/runtime/master/src/main/java/edu/snu/nemo/runtime/master/resource/ContainerManager.java
b/runtime/master/src/main/java/edu/snu/nemo/runtime/master/resource/ContainerManager.java
index 05a50da8..32d4bd9c 100644
---
a/runtime/master/src/main/java/edu/snu/nemo/runtime/master/resource/ContainerManager.java
+++
b/runtime/master/src/main/java/edu/snu/nemo/runtime/master/resource/ContainerManager.java
@@ -25,6 +25,9 @@
import org.apache.reef.driver.evaluator.EvaluatorRequest;
import org.apache.reef.driver.evaluator.EvaluatorRequestor;
import org.apache.reef.tang.Configuration;
+import org.apache.reef.tang.annotations.Parameter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import javax.annotation.concurrent.NotThreadSafe;
import javax.inject.Inject;
@@ -33,10 +36,6 @@
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
-import org.apache.reef.tang.annotations.Parameter;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
/**
* (WARNING) This class is not thread-safe.
* Only a single thread should use the methods of this class.
@@ -46,7 +45,6 @@
* Serves as a single point of container management in Runtime.
* We define a unit of resource a container (an evaluator in REEF), and launch
a single executor on each container.
*/
-// TODO #60: Specify Types in Requesting Containers
// We need an overall cleanup of this class after #60 is resolved.
@DriverSide
@NotThreadSafe
diff --git
a/tests/src/test/java/edu/snu/nemo/tests/runtime/common/plan/DAGConverterTest.java
b/tests/src/test/java/edu/snu/nemo/tests/runtime/common/plan/DAGConverterTest.java
index 22d875ef..671fdb09 100644
---
a/tests/src/test/java/edu/snu/nemo/tests/runtime/common/plan/DAGConverterTest.java
+++
b/tests/src/test/java/edu/snu/nemo/tests/runtime/common/plan/DAGConverterTest.java
@@ -151,7 +151,6 @@ public void testComplexPlan() throws Exception {
irDAGBuilder.addVertex(v6);
irDAGBuilder.addVertex(v8);
- // TODO #13: Implement Join Node
// irDAGBuilder.addVertex(v7);
final IREdge e1 = new
IREdge(DataCommunicationPatternProperty.Value.OneToOne, v1, v2);
@@ -221,7 +220,6 @@ public void testComplexPlan() throws Exception {
final Stage stage4 = sortedLogicalDAG.get(3);
final Stage stage5 = sortedLogicalDAG.get(3);
- // TODO #148: Optimize Stage Partitioner Algorithm in NemoBackend
// The following asserts depend on how stage partitioning is defined; test
must be rewritten accordingly.
// assertEquals(logicalDAG.getVertices().size(), 5);
// assertEquals(logicalDAG.getIncomingEdgesOf(stage1).size(), 0);
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services