This is an automated email from the ASF dual-hosted git repository.

sergeykamov pushed a commit to branch NLPCRAFT-41
in repository https://gitbox.apache.org/repos/asf/incubator-nlpcraft.git


The following commit(s) were added to refs/heads/NLPCRAFT-41 by this push:
     new 0d94a1e  WIP.
0d94a1e is described below

commit 0d94a1ed207b9065e26aeca60679cfe55baf4b42
Author: Sergey Kamov <[email protected]>
AuthorDate: Thu Aug 27 22:25:01 2020 +0300

    WIP.
---
 .../nlpcraft/common/inspections/NCInspection.scala |  61 +++
 .../inspections/NCInspectionType.scala}            |  18 +-
 .../inspections/NCInspector.scala}                 |  11 +-
 .../org/apache/nlpcraft/probe/NCProbeBoot.scala    |   3 +
 .../nlpcraft/probe/mgrs/NCModelDecorator.scala     |   4 +-
 .../nlpcraft/probe/mgrs/cmd/NCCommandManager.scala |  33 +-
 .../probe/mgrs/conn/NCConnectionManager.scala      |  12 +-
 .../probe/mgrs/deploy/NCDeployManager.scala        |  34 +-
 .../inspections/NCProbeInspectionManager.scala     |  55 +++
 .../inspectors/NCInspectorIntents.scala}           |  28 +-
 .../inspections/inspectors/NCInspectorMacros.scala |  42 ++
 .../inspectors/NCInspectorSynonyms.scala           |  63 +++
 .../NCInspectorSynonymsSuggestions.scala           |  49 ++
 .../nlpcraft/probe/mgrs/model/NCModelManager.scala |  47 +-
 .../org/apache/nlpcraft/server/NCServer.scala      |   6 +-
 .../nlpcraft/server/mdo/NCProbeModelMdo.scala      |   5 +-
 .../nlpcraft/server/model/NCEnhanceManager.scala   | 524 ---------------------
 .../server/model/NCServerInspectorManager.scala    |  92 ++++
 .../NCInspectorSynonymsSuggestions.scala           | 401 ++++++++++++++++
 .../opencensus/NCOpenCensusServerStats.scala       |   2 +-
 .../nlpcraft/server/probe/NCProbeManager.scala     |  94 +++-
 .../nlpcraft/server/rest/NCBasicRestApi.scala      | 100 ++--
 22 files changed, 1006 insertions(+), 678 deletions(-)

diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/common/inspections/NCInspection.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/common/inspections/NCInspection.scala
new file mode 100644
index 0000000..da4a65a
--- /dev/null
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/common/inspections/NCInspection.scala
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft.common.inspections
+
+import java.util
+import scala.collection.JavaConverters._
+
+/**
+  * Note that suggestions and data must be simple types or Java collections so that they can be transferred between server and probe as JSON.
+  */
+case class NCInspection(
+    errors: Option[Seq[String]] = None,
+    warnings: Option[Seq[String]] = None,
+    suggestions: Option[Seq[AnyRef]] = None,
+
+    // Information for next inspection layer.
+    data: Option[AnyRef] = None
+) {
+    def serialize(): java.util.Map[String, AnyRef] = {
+        val m: util.Map[String, AnyRef] = new java.util.HashMap[String, AnyRef]
+
+        m.put("errors", errors.getOrElse(Seq.empty).asJava)
+        m.put("warnings", warnings.getOrElse(Seq.empty).asJava)
+        m.put("suggestions", suggestions.getOrElse(Seq.empty).asJava)
+        m.put("data", data.orNull)
+
+        m
+    }
+}
+
+object NCInspection {
+    def deserialize(m: util.Map[String, AnyRef]): NCInspection = {
+        def getSeq(name: String): Option[Seq[String]] = {
+            val seq = m.get(name).asInstanceOf[java.util.List[String]]
+
+            if (seq.isEmpty) None else Some(seq.asScala)
+        }
+
+        NCInspection(
+            errors = getSeq("errors"),
+            warnings = getSeq("warnings"),
+            suggestions = getSeq("suggestions"),
+            data = Option(m.get("data"))
+        )
+    }
+}
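
A minimal round-trip sketch, assuming only the NCInspection API added above (the element ID in the
warning is hypothetical): an inspection is flattened to a Java map for JSON transfer between server
and probe, then restored on the other side.

    import org.apache.nlpcraft.common.inspections.NCInspection

    // Build an inspection with one warning, flatten it for transfer, then restore it.
    val original = NCInspection(warnings = Some(Seq("Element 'x:city' has no synonyms")))
    val wire: java.util.Map[String, AnyRef] = original.serialize() // errors/warnings/suggestions become Java lists.
    val restored = NCInspection.deserialize(wire)                  // Empty lists collapse back to None.
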
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/deploy/NCModelHolder.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/common/inspections/NCInspectionType.scala
similarity index 68%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/deploy/NCModelHolder.scala
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/common/inspections/NCInspectionType.scala
index 372f890..595ba7a 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/deploy/NCModelHolder.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/common/inspections/NCInspectionType.scala
@@ -15,17 +15,13 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft.probe.mgrs.deploy
+package org.apache.nlpcraft.common.inspections
 
-import org.apache.nlpcraft.model.NCModel
+object NCInspectionType extends Enumeration {
+    type NCInspectionType = Value
 
-/**
-  * Holder for the model and its intent samepls.
- *
-  * @param model Model.
-  * @param intentSamples Map of intent samples.
-  */
-case class NCModelHolder(model: NCModel, intentSamples: Map[String, Seq[String]]) {
-    require(model != null)
-    require(intentSamples != null)
+    val SUGGEST_SYNONYMS: Value = Value
+    val INSPECTION_MACROS: Value = Value
+    val INSPECTION_SYNONYMS: Value = Value
+    val INSPECTION_INTENTS: Value = Value
 }
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/model/NCEnhanceType.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/common/inspections/NCInspector.scala
similarity index 76%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/server/model/NCEnhanceType.scala
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/common/inspections/NCInspector.scala
index 147a638..74788b3 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/model/NCEnhanceType.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/common/inspections/NCInspector.scala
@@ -15,13 +15,10 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft.server.model
+package org.apache.nlpcraft.common.inspections
 
-object NCEnhanceType extends Enumeration {
-    type NCEnhanceType = Value
+import io.opencensus.trace.Span
 
-    val SUGGEST_SYNONYMS = Value
-    val VALIDATION_MACROS = Value
-    val VALIDATION_SYNONYMS = Value
-    val VALIDATION_INTENTS = Value
+trait NCInspector {
+    def inspect(mdlId: String, data: Option[AnyRef] = None, parent: Span = null): NCInspection
 }
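
A hypothetical no-op implementation, shown only to illustrate the NCInspector contract (the object
name is made up; the real inspectors follow below):

    import io.opencensus.trace.Span
    import org.apache.nlpcraft.common.NCService
    import org.apache.nlpcraft.common.inspections.{NCInspection, NCInspector}

    // Sketch: an inspector that reports nothing for any model.
    object NCInspectorNoOp extends NCService with NCInspector {
        override def inspect(mdlId: String, data: Option[AnyRef], parent: Span = null): NCInspection =
            startScopedSpan("inspect", parent) { _ ⇒ NCInspection() }
    }
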
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/NCProbeBoot.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/NCProbeBoot.scala
index b583100..ea55a1b 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/NCProbeBoot.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/NCProbeBoot.scala
@@ -36,6 +36,7 @@ import org.apache.nlpcraft.probe.mgrs.conn.NCConnectionManager
 import org.apache.nlpcraft.probe.mgrs.conversation.NCConversationManager
 import org.apache.nlpcraft.probe.mgrs.deploy.NCDeployManager
 import org.apache.nlpcraft.probe.mgrs.dialogflow.NCDialogFlowManager
+import org.apache.nlpcraft.probe.mgrs.inspections.NCProbeInspectionManager
 import org.apache.nlpcraft.probe.mgrs.lifecycle.NCLifecycleManager
 import org.apache.nlpcraft.probe.mgrs.model.NCModelManager
 import org.apache.nlpcraft.probe.mgrs.nlp.NCProbeEnrichmentManager
@@ -422,6 +423,7 @@ private [probe] object NCProbeBoot extends LazyLogging with NCOpenCensusTrace {
             NCNlpCoreManager.start(span)
             NCNumericManager.start(span)
             NCDeployManager.start(span)
+            NCProbeInspectionManager.start(span)
             NCModelManager.start(span)
             NCCommandManager.start(span)
             NCDictionaryManager.start(span)
@@ -461,6 +463,7 @@ private [probe] object NCProbeBoot extends LazyLogging with NCOpenCensusTrace {
             NCDictionaryManager.stop(span)
             NCCommandManager.stop(span)
             NCModelManager.stop(span)
+            NCProbeInspectionManager.stop(span)
             NCDeployManager.stop(span)
             NCNumericManager.stop(span)
             NCNlpCoreManager.stop(span)
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/NCModelDecorator.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/NCModelDecorator.scala
index 9805a16..9e4d238 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/NCModelDecorator.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/NCModelDecorator.scala
@@ -32,7 +32,7 @@ import scala.language.implicitConversions
 /**
   *
   * @param model Decorated model.
-  * @param intentsSamples Model examples.
+  * @param intentSamples Intents samples.
   * @param synonyms Fast-access synonyms map for first phase.
   * @param synonymsDsl Fast-access synonyms map for second phase.
   * @param additionalStopWordsStems Stemmatized additional stopwords.
@@ -42,7 +42,7 @@ import scala.language.implicitConversions
   */
 case class NCModelDecorator(
     model: NCModel,
-    intentsSamples: Map[String, Seq[String]],
+    intentSamples: Map[String, Seq[String]],
     synonyms: Map[String/*Element ID*/, Map[Int/*Synonym length*/, Seq[NCSynonym]]], // Fast access map.
     synonymsDsl: Map[String/*Element ID*/, Map[Int/*Synonym length*/, Seq[NCSynonym]]], // Fast access map.
     additionalStopWordsStems: Set[String],
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/cmd/NCCommandManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/cmd/NCCommandManager.scala
index 04ce26a..1c358a0 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/cmd/NCCommandManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/cmd/NCCommandManager.scala
@@ -18,15 +18,20 @@
 package org.apache.nlpcraft.probe.mgrs.cmd
 
 import java.io.Serializable
+import java.util
 
+import com.google.gson.Gson
 import io.opencensus.trace.Span
 import org.apache.nlpcraft.common.NCService
+import org.apache.nlpcraft.common.inspections.NCInspectionType
 import org.apache.nlpcraft.common.nlp.NCNlpSentence
 import org.apache.nlpcraft.model.NCToken
 import org.apache.nlpcraft.probe.mgrs.NCProbeMessage
+import org.apache.nlpcraft.probe.mgrs.conn.NCConnectionManager
+import org.apache.nlpcraft.probe.mgrs.conversation.NCConversationManager
 import org.apache.nlpcraft.probe.mgrs.dialogflow.NCDialogFlowManager
+import org.apache.nlpcraft.probe.mgrs.inspections.NCProbeInspectionManager
 import org.apache.nlpcraft.probe.mgrs.nlp.NCProbeEnrichmentManager
-import org.apache.nlpcraft.probe.mgrs.conversation.NCConversationManager
 
 import scala.collection.JavaConverters._
 
@@ -34,6 +39,8 @@ import scala.collection.JavaConverters._
   * Probe commands processor.
   */
 object NCCommandManager extends NCService {
+    private final val GSON = new Gson()
+
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
         super.start()
     }
@@ -84,8 +91,28 @@ object NCCommandManager extends NCService {
                             mdlId = msg.data[String]("mdlId"),
                             logEnable = msg.data[Boolean]("logEnable"),
                             span
-                     )
-    
+                    )
+
+                    case "S2P_MODEL_INSPECTION" ⇒
+                        val resJs: util.Map[String, util.Map[String, AnyRef]] =
+                            NCProbeInspectionManager.inspect(
+                                mdlId = msg.data[String]("mdlId"),
+                                types =
+                                    msg.data[java.util.List[String]]("types").
+                                    asScala.
+                                    map(p ⇒ NCInspectionType.withName(p.toUpperCase)),
+                                span
+                            ).map { case (typ, inspection) ⇒ typ.toString → inspection.serialize() }.asJava
+
+                            NCConnectionManager.send(
+                                NCProbeMessage(
+                                    "P2S_MODEL_INSPECTION",
+                                    "reqGuid" → msg.getGuid,
+                                    "resp" → GSON.toJson(resJs)
+                                ),
+                                span
+                            )
+
                     case _ ⇒
                         logger.error(s"Received unknown server message (you 
need to update the probe): ${msg.getType}")
                 }
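
A sketch of the assumed shape of the "resp" payload built above: a single JSON object keyed by
inspection type name, each value being one serialized inspection (the macro warning below is
hypothetical).

    import java.util
    import com.google.gson.Gson

    // One serialized inspection keyed by its type name, as it would travel in "resp".
    val inspection = new util.HashMap[String, AnyRef]()
    inspection.put("warnings", util.Arrays.asList("Macro is not used: <OF>"))

    val resJs = new util.HashMap[String, util.Map[String, AnyRef]]()
    resJs.put("INSPECTION_MACROS", inspection)

    new Gson().toJson(resJs) // {"INSPECTION_MACROS":{"warnings":["Macro is not used: <OF>"]}}
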
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/conn/NCConnectionManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/conn/NCConnectionManager.scala
index 5377cb1..96bb4c2 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/conn/NCConnectionManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/conn/NCConnectionManager.scala
@@ -231,7 +231,6 @@ object NCConnectionManager extends NCService {
                         NCModelManager.getAllModels().map(m ⇒ {
                             val mdl = m.model
 
-                            require(m.intentsSamples != null)
                             // Model already validated.
 
                             // util.HashSet created to avoid scala collections serialization error.
@@ -240,16 +239,7 @@ object NCConnectionManager extends NCService {
                                 mdl.getId,
                                 mdl.getName,
                                 mdl.getVersion,
-                                new util.HashSet[String](mdl.getEnabledBuiltInTokens),
-                                mdl.getMacros,
-                                new util.HashMap[String, util.List[String]](
-                                    mdl.getElements.asScala.map(p ⇒ p.getId → p.getSynonyms).toMap.asJava
-                                ),
-                                new util.HashMap[String, util.List[String]]
-                                    (m.intentsSamples.map {
-                                        case (intentId, samples)  ⇒
-                                            intentId → new util.ArrayList[String](samples.asJava) }.asJava
-                                    )
+                                new util.HashSet[String](mdl.getEnabledBuiltInTokens)
                             )
                         })
                 ), cryptoKey)
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/deploy/NCDeployManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/deploy/NCDeployManager.scala
index 12823a7..59da69a 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/deploy/NCDeployManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/deploy/NCDeployManager.scala
@@ -18,15 +18,17 @@
 package org.apache.nlpcraft.probe.mgrs.deploy
 
 import java.io._
-import java.util.jar.{JarInputStream ⇒ JIS}
+import java.util.jar.{JarInputStream => JIS}
 
 import io.opencensus.trace.Span
 import org.apache.nlpcraft.common._
 import org.apache.nlpcraft.common.config.NCConfigurable
+import org.apache.nlpcraft.common.inspections.NCInspectionType
 import org.apache.nlpcraft.model._
 import org.apache.nlpcraft.model.factories.basic.NCBasicModelFactory
 import org.apache.nlpcraft.model.impl.NCModelImpl
 import org.apache.nlpcraft.model.intent.impl.{NCIntentScanner, NCIntentSolver}
+import org.apache.nlpcraft.probe.mgrs.inspections.NCProbeInspectionManager
 import resource.managed
 
 import scala.collection.JavaConverters._
@@ -45,7 +47,7 @@ object NCDeployManager extends NCService with DecorateAsScala {
 
     private final val ID_REGEX = "^[_a-zA-Z]+[a-zA-Z0-9:-_]*$"
 
-    @volatile private var models: ArrayBuffer[NCModelHolder] = _
+    @volatile private var models: ArrayBuffer[NCModel] = _
     @volatile private var modelFactory: NCModelFactory = _
 
     object Config extends NCConfigurable {
@@ -79,7 +81,7 @@ object NCDeployManager extends NCService with DecorateAsScala {
       * @return
       */
     @throws[NCE]
-    private def wrap(mdl: NCModel): NCModelHolder = {
+    private def wrap(mdl: NCModel): NCModel = {
         checkCollection("additionalStopWords", mdl.getAdditionalStopWords)
         checkCollection("elements", mdl.getElements)
         checkCollection("enabledBuiltInTokens", mdl.getEnabledBuiltInTokens)
@@ -92,25 +94,27 @@ object NCDeployManager extends NCService with DecorateAsScala {
         // Scan for intent annotations in the model class.
         val intents = NCIntentScanner.scan(mdl)
 
+        val mdlId = mdl.getId
+
         if (intents.nonEmpty) {
             // Check the uniqueness of intent IDs.
             U.getDups(intents.keys.toSeq.map(_.id)) match {
-                case ids if ids.nonEmpty ⇒ throw new NCE(s"Duplicate intent IDs found for '${mdl.getId}' model: ${ids.mkString(",")}")
+                case ids if ids.nonEmpty ⇒ throw new NCE(s"Duplicate intent IDs found for '$mdlId' model: ${ids.mkString(",")}")
                 case _ ⇒ ()
             }
     
-            logger.info(s"Intents found in the model: ${mdl.getId}")
+            logger.info(s"Intents found in the model: $mdlId")
 
             val solver = new NCIntentSolver(
                 intents.toList.map(x ⇒ (x._1, (z: NCIntentMatch) ⇒ x._2.apply(z)))
             )
 
-            NCModelHolder(new NCModelImpl(mdl, solver), NCIntentScanner.scanIntentsSamples(mdl).toMap)
+            new NCModelImpl(mdl, solver)
         }
         else {
-            logger.warn(s"Model has no intents: ${mdl.getId}")
+            logger.warn(s"Model has no intents: $mdlId")
 
-            NCModelHolder(new NCModelImpl(mdl, null), Map.empty)
+            new NCModelImpl(mdl, null)
         }
     }
 
@@ -145,7 +149,7 @@ object NCDeployManager extends NCService with DecorateAsScala {
       * @param clsName Model class name.
       */
     @throws[NCE]
-    private def makeModel(clsName: String): NCModelHolder =
+    private def makeModel(clsName: String): NCModel =
         try
             wrap(
                 makeModelFromSource(
@@ -180,7 +184,7 @@ object NCDeployManager extends NCService with DecorateAsScala {
       * @param jarFile JAR file to extract from.
       */
     @throws[NCE]
-    private def extractModels(jarFile: File): Seq[NCModelHolder] = {
+    private def extractModels(jarFile: File): Seq[NCModel] = {
         val clsLdr = Thread.currentThread().getContextClassLoader
         
         val classes = mutable.ArrayBuffer.empty[Class[_ <: NCModel]]
@@ -220,7 +224,7 @@ object NCDeployManager extends NCService with DecorateAsScala {
     @throws[NCE]
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
         modelFactory = new NCBasicModelFactory
-        models = ArrayBuffer.empty[NCModelHolder]
+        models = ArrayBuffer.empty[NCModel]
 
         // Initialize model factory (if configured).
         Config.modelFactoryType match {
@@ -253,9 +257,7 @@ object NCDeployManager extends NCService with DecorateAsScala {
         }
 
         // Verify models' identities.
-        models.foreach(h ⇒ {
-            val mdl = h.model
-
+        models.foreach(mdl ⇒ {
             val mdlName = mdl.getName
             val mdlId = mdl.getId
             val mdlVer = mdl.getVersion
@@ -284,7 +286,7 @@ object NCDeployManager extends NCService with DecorateAsScala {
                     throw new NCE(s"Model element ID '${elm.getId}' does not match '$ID_REGEX' regex in: $mdlId")
         })
 
-        if (U.containsDups(models.map(_.model.getId).toList))
+        if (U.containsDups(models.map(_.getId).toList))
             throw new NCE("Duplicate model IDs detected.")
         
         super.start()
@@ -305,5 +307,5 @@ object NCDeployManager extends NCService with DecorateAsScala {
       *
       * @return
       */
-    def getModels: Seq[NCModelHolder] = models
+    def getModels: Seq[NCModel] = models
 }
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/inspections/NCProbeInspectionManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/inspections/NCProbeInspectionManager.scala
new file mode 100644
index 0000000..e5fd81b
--- /dev/null
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/inspections/NCProbeInspectionManager.scala
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft.probe.mgrs.inspections
+
+import io.opencensus.trace.Span
+import org.apache.nlpcraft.common.inspections.NCInspectionType._
+import org.apache.nlpcraft.common.inspections.{NCInspection, NCInspectionType}
+import org.apache.nlpcraft.common.{NCE, NCService}
+import org.apache.nlpcraft.model.opencensus.stats.NCOpenCensusModelStats
+import org.apache.nlpcraft.probe.mgrs.inspections.inspectors._
+
+object NCProbeInspectionManager extends NCService with NCOpenCensusModelStats {
+    private final val INSPECTORS =
+        Map(
+            SUGGEST_SYNONYMS → NCInspectorSynonymsSuggestions,
+            INSPECTION_MACROS → NCInspectorMacros,
+            INSPECTION_SYNONYMS → NCInspectorSynonyms,
+            INSPECTION_INTENTS → NCInspectorIntents
+        )
+
+    require(NCInspectionType.values.forall(INSPECTORS.contains))
+
+    override def start(parent: Span): NCService = startScopedSpan("start", parent) { _ ⇒
+        INSPECTORS.values.foreach(_.start())
+
+        super.start(parent)
+    }
+
+    override def stop(parent: Span): Unit = startScopedSpan("stop", parent) { _ ⇒
+        super.stop()
+
+        INSPECTORS.values.foreach(_.stop())
+    }
+
+    @throws[NCE]
+    def inspect(mdlId: String, types: Seq[NCInspectionType], parent: Span = null): Map[NCInspectionType, NCInspection] =
+        startScopedSpan("inspect", parent) { _ ⇒
+            types.map(t ⇒ t → INSPECTORS(t).inspect(mdlId, parent = parent)).toMap
+        }
+}
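
A usage sketch for the manager above, assuming the model is already deployed on the probe (the
model ID is hypothetical):

    import org.apache.nlpcraft.common.inspections.NCInspectionType._
    import org.apache.nlpcraft.probe.mgrs.inspections.NCProbeInspectionManager

    // Run two inspections for a deployed model and print any warnings they produced.
    val res = NCProbeInspectionManager.inspect("my.model.id", Seq(INSPECTION_MACROS, INSPECTION_SYNONYMS))

    for ((typ, insp) ← res; w ← insp.warnings.getOrElse(Seq.empty))
        println(s"$typ: $w")
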
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/model/NCEnhanceElement.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/inspections/inspectors/NCInspectorIntents.scala
similarity index 54%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/server/model/NCEnhanceElement.scala
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/inspections/inspectors/NCInspectorIntents.scala
index aa6c3ad..ffab3e5 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/model/NCEnhanceElement.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/inspections/inspectors/NCInspectorIntents.scala
@@ -15,18 +15,20 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft.server.model
+package org.apache.nlpcraft.probe.mgrs.inspections.inspectors
 
-import org.apache.nlpcraft.server.model.NCEnhanceType.NCEnhanceType
+import io.opencensus.trace.Span
+import org.apache.nlpcraft.common.inspections.{NCInspection, NCInspector}
+import org.apache.nlpcraft.common.{NCE, NCService}
+import org.apache.nlpcraft.probe.mgrs.model.NCModelManager
 
-/**
-  * TODO:
-  */
-case class NCEnhanceElement(
-    enhanceType: NCEnhanceType,
-    errors: Option[Seq[String]] = None,
-    warnings: Option[Seq[String]] = None,
-    // Note that `suggestions` should be simple type or java collections.
-    // Scala collections cannot be converted into JSON.
-    suggestions: Option[AnyRef] = None
-)
+object NCInspectorIntents extends NCService with NCInspector {
+    override def inspect(mdlId: String, data: Option[AnyRef], parent: Span = null): NCInspection =
+        startScopedSpan("inspect", parent) { _ ⇒
+            val mdl = NCModelManager.getModel(mdlId).getOrElse(throw new NCE(s"Model not found: $mdlId")).model
+
+            NCInspection(
+                // TODO:
+            )
+        }
+}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/inspections/inspectors/NCInspectorMacros.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/inspections/inspectors/NCInspectorMacros.scala
new file mode 100644
index 0000000..78e54b8
--- /dev/null
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/inspections/inspectors/NCInspectorMacros.scala
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft.probe.mgrs.inspections.inspectors
+
+import io.opencensus.trace.Span
+import org.apache.nlpcraft.common.inspections.{NCInspection, NCInspector}
+import org.apache.nlpcraft.common.{NCE, NCService}
+import org.apache.nlpcraft.probe.mgrs.model.NCModelManager
+
+import scala.collection.JavaConverters._
+
+object NCInspectorMacros extends NCService with NCInspector {
+    override def inspect(mdlId: String, data: Option[AnyRef], parent: Span = null): NCInspection =
+        startScopedSpan("inspect", parent) { _ ⇒
+            val mdl = NCModelManager.getModel(mdlId).getOrElse(throw new NCE(s"Model not found: $mdlId")).model
+
+            val syns = mdl.getElements.asScala.flatMap(_.getSynonyms.asScala)
+
+            val warns =
+                mdl.getMacros.asScala.keys.
+                // TODO: is it valid check?
+                flatMap(m ⇒ if (syns.exists(_.contains(m))) None else Some(s"Macro is not used: $m")).
+                toSeq
+
+            NCInspection(warnings = if (warns.isEmpty) None else Some(warns))
+        }
+}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/inspections/inspectors/NCInspectorSynonyms.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/inspections/inspectors/NCInspectorSynonyms.scala
new file mode 100644
index 0000000..9a7ed10
--- /dev/null
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/inspections/inspectors/NCInspectorSynonyms.scala
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft.probe.mgrs.inspections.inspectors
+
+import io.opencensus.trace.Span
+import org.apache.nlpcraft.common.inspections.{NCInspection, NCInspector}
+import org.apache.nlpcraft.common.makro.NCMacroParser
+import org.apache.nlpcraft.common.{NCE, NCService}
+import org.apache.nlpcraft.probe.mgrs.model.NCModelManager
+
+import scala.collection.JavaConverters._
+import scala.collection.mutable
+
+object NCInspectorSynonyms extends NCService with NCInspector {
+    private final val TOO_MANY_SYNS = 10000
+
+    override def inspect(mdlId: String, data: Option[AnyRef], parent: Span = null): NCInspection =
+        startScopedSpan("inspect", parent) { _ ⇒
+            val mdl = NCModelManager.getModel(mdlId).getOrElse(throw new NCE(s"Model not found: $mdlId")).model
+
+            val warns = mutable.ArrayBuffer.empty[String]
+
+            val parser = new NCMacroParser()
+
+            mdl.getMacros.asScala.foreach { case (name, str) ⇒ parser.addMacro(name, str) }
+
+            val mdlSyns = mdl.getElements.asScala.map(p ⇒ p.getId → p.getSynonyms.asScala.flatMap(parser.expand))
+
+            mdlSyns.foreach { case (elemId, syns) ⇒
+                val size = syns.size
+
+                if (size == 0)
+                    warns += s"Element: '$elemId' doesn't have synonyms"
+                else if (size > TOO_MANY_SYNS)
+                    warns += s"Element: '$elemId' has too many synonyms: $size"
+
+                val others = mdlSyns.filter { case (othId, _) ⇒ othId != elemId}
+
+                val intersects =
+                    others.filter { case (_, othSyns) ⇒ othSyns.intersect(syns).nonEmpty }.toMap.keys.mkString(",")
+
+                if (intersects.nonEmpty)
+                    warns += s"Element: '$elemId' has same synonyms with '$intersects'"
+            }
+
+            NCInspection(warnings = if (warns.isEmpty) None else Some(warns))
+        }
+}
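
The overlap check above, restated on plain collections with hypothetical element IDs and synonyms,
to show what triggers the "same synonyms" warning:

    // Two elements sharing the synonym "city" (hypothetical model data).
    val mdlSyns = Map(
        "x:city"  → Seq("city", "town"),
        "x:metro" → Seq("metro", "city")
    )

    val (elemId, syns) = ("x:city", mdlSyns("x:city"))

    val intersects = mdlSyns.
        filter { case (othId, othSyns) ⇒ othId != elemId && othSyns.intersect(syns).nonEmpty }.
        keys.mkString(",")

    // intersects == "x:metro" ⇒ the element would be reported as sharing synonyms with 'x:metro'.
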
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/inspections/inspectors/NCInspectorSynonymsSuggestions.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/inspections/inspectors/NCInspectorSynonymsSuggestions.scala
new file mode 100644
index 0000000..3a1c228
--- /dev/null
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/inspections/inspectors/NCInspectorSynonymsSuggestions.scala
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft.probe.mgrs.inspections.inspectors
+
+import java.util
+
+import io.opencensus.trace.Span
+import org.apache.nlpcraft.common.inspections.{NCInspection, NCInspector}
+import org.apache.nlpcraft.common.{NCE, NCService}
+import org.apache.nlpcraft.model.intent.impl.NCIntentScanner
+import org.apache.nlpcraft.probe.mgrs.model.NCModelManager
+
+import scala.collection.JavaConverters._
+
+object NCInspectorSynonymsSuggestions extends NCService with NCInspector {
+    override def inspect(mdlId: String, data: Option[AnyRef], parent: Span = null): NCInspection =
+        startScopedSpan("inspect", parent) { _ ⇒
+            val mdl = NCModelManager.getModel(mdlId).getOrElse(throw new NCE(s"Model not found: $mdlId")).model
+
+            val m = new util.HashMap[String, Any]()
+
+            m.put("macros", mdl.getMacros)
+            m.put("elementsSynonyms", new util.HashMap[String, util.List[String]](
+                mdl.getElements.asScala.map(p ⇒ p.getId → p.getSynonyms).toMap.asJava
+            ))
+            m.put("intentsSamples", new util.HashMap[String, util.List[String]](
+                NCIntentScanner.scanIntentsSamples(mdl).toMap.map {
+                    case (intentId, samples) ⇒ intentId → samples.asJava
+                }.asJava
+            ))
+
+            NCInspection(data = Some(m))
+        }
+}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/model/NCModelManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/model/NCModelManager.scala
index 56940d5..2538b1e 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/model/NCModelManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/model/NCModelManager.scala
@@ -23,11 +23,14 @@ import io.opencensus.trace.Span
 import org.apache.nlpcraft.common._
 import org.apache.nlpcraft.common.util.NCUtils._
 import org.apache.nlpcraft.common.ascii.NCAsciiTable
+import org.apache.nlpcraft.common.inspections.NCInspectionType
 import org.apache.nlpcraft.common.makro.NCMacroParser
 import org.apache.nlpcraft.common.nlp.core.NCNlpCoreManager
 import org.apache.nlpcraft.model._
+import org.apache.nlpcraft.model.intent.impl.NCIntentScanner
 import org.apache.nlpcraft.probe.mgrs.NCSynonymChunkKind._
 import org.apache.nlpcraft.probe.mgrs.deploy._
+import org.apache.nlpcraft.probe.mgrs.inspections.NCProbeInspectionManager
 import org.apache.nlpcraft.probe.mgrs.{NCModelDecorator, NCSynonym, NCSynonymChunk}
 
 import collection.convert.ImplicitConversions._
@@ -59,22 +62,22 @@ object NCModelManager extends NCService with DecorateAsScala {
     )
     
     /**
-      * @param h Data model holder.
+      * @param mdl Model.
       */
-    private def addNewModel(h: NCModelHolder): Unit = {
+    private def addNewModel(mdl: NCModel): Unit = {
         require(Thread.holdsLock(mux))
 
-        checkModelConfig(h.model)
+        checkModelConfig(mdl)
 
         val parser = new NCMacroParser
 
         // Initialize macro parser.
-        h.model.getMacros.asScala.foreach(t ⇒ parser.addMacro(t._1, t._2))
+        mdl.getMacros.asScala.foreach(t ⇒ parser.addMacro(t._1, t._2))
 
-        models += h.model.getId → verifyAndDecorate(h, parser)
+        models += mdl.getId → verifyAndDecorate(mdl, parser)
 
         // Init callback on the model.
-        h.model.onInit()
+        mdl.onInit()
     }
 
     @throws[NCE]
@@ -99,9 +102,33 @@ object NCModelManager extends NCService with DecorateAsScala {
                     mdl.elements.keySet.size,
                     synCnt
                 )
+
             })
 
             tbl.info(logger, Some(s"Models deployed: ${models.size}\n"))
+
+            models.values.foreach(mdl ⇒ {
+                val mdlId = mdl.model.getId
+
+                val inspections = NCProbeInspectionManager.inspect(mdlId, NCInspectionType.values.toSeq)
+
+                inspections.foreach { case(_, inspection) ⇒
+                    inspection.errors match {
+                        case Some(errs) ⇒ errs.foreach(e ⇒ logger.error(s"Validation error [model=$mdlId, text=$e]"))
+                        case None ⇒ // No-op.
+                    }
+
+                    inspection.warnings match {
+                        case Some(warns) ⇒ warns.foreach(w ⇒ logger.warn(s"Validation warning [model=$mdlId, text=$w]"))
+                        case None ⇒ // No-op.
+                    }
+
+                    inspection.suggestions match {
+                        case Some(sugs) ⇒ sugs.foreach(s ⇒ logger.info(s"Validation suggestion [model=$mdlId, text=$s]"))
+                        case None ⇒ // No-op.
+                    }
+                }
+            })
             
             addTags(
                 span,
@@ -247,14 +274,12 @@ object NCModelManager extends NCService with DecorateAsScala {
     /**
       * Verifies given model and makes a decorator optimized for model enricher.
       *
-      * @param h Model holder to verify and decorate.
+      * @param mdl Model to verify and decorate.
       * @param parser Initialized macro parser.
       * @return Model decorator.
       */
     @throws[NCE]
-    private def verifyAndDecorate(h: NCModelHolder, parser: NCMacroParser): NCModelDecorator = {
-        val mdl = h.model
-
+    private def verifyAndDecorate(mdl: NCModel, parser: NCMacroParser): NCModelDecorator = {
         for (elm ← mdl.getElements)
             checkElement(mdl, elm)
 
@@ -526,7 +551,7 @@ object NCModelManager extends NCService with DecorateAsScala {
 
         NCModelDecorator(
             model = mdl,
-            intentsSamples = h.intentSamples,
+            intentSamples = NCIntentScanner.scanIntentsSamples(mdl).toMap,
             synonyms = mkFastAccessMap(filter(syns, dsl = false)),
             synonymsDsl = mkFastAccessMap(filter(syns, dsl = true)),
             additionalStopWordsStems = addStopWords,
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/NCServer.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/NCServer.scala
index 34e85d0..1472c1d 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/NCServer.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/NCServer.scala
@@ -45,7 +45,7 @@ import org.apache.nlpcraft.server.proclog.NCProcessLogManager
 import org.apache.nlpcraft.server.query.NCQueryManager
 import org.apache.nlpcraft.server.rest.NCRestManager
 import org.apache.nlpcraft.server.sql.NCSqlManager
-import org.apache.nlpcraft.server.model.NCEnhanceManager
+import org.apache.nlpcraft.server.model.NCServerInspectorManager
 import org.apache.nlpcraft.server.tx.NCTxManager
 import org.apache.nlpcraft.server.user.NCUserManager
 
@@ -123,7 +123,7 @@ object NCServer extends App with NCIgniteInstance with LazyLogging with NCOpenCe
                 },
                 () ⇒ {
                     NCProbeManager.start(span)
-                    NCEnhanceManager.start(span)
+                    NCServerInspectorManager.start(span)
                 },
                 () ⇒ NCFeedbackManager.start(span)
             )
@@ -148,7 +148,7 @@ object NCServer extends App with NCIgniteInstance with LazyLogging with NCOpenCe
                 NCRestManager,
                 NCQueryManager,
                 NCFeedbackManager,
-                NCEnhanceManager,
+                NCServerInspectorManager,
                 NCProbeManager,
                 NCCompanyManager,
                 NCUserManager,
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/mdo/NCProbeModelMdo.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/mdo/NCProbeModelMdo.scala
index 0598ff6..1510c4b 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/mdo/NCProbeModelMdo.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/mdo/NCProbeModelMdo.scala
@@ -27,10 +27,7 @@ case class NCProbeModelMdo(
     @NCMdoField id: String,
     @NCMdoField name: String,
     @NCMdoField version: String,
-    @NCMdoField enabledBuiltInTokens: Set[String],
-    @NCMdoField macros: Map[String, String],
-    @NCMdoField elementsSynonyms: Map[String, Seq[String]],
-    @NCMdoField intentsSamples: Map[String, Seq[String]]
+    @NCMdoField enabledBuiltInTokens: Set[String]
 ) extends NCAnnotatedMdo[NCProbeModelMdo] {
     override def hashCode(): Int = s"$id$name".hashCode()
     
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/model/NCEnhanceManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/model/NCEnhanceManager.scala
deleted file mode 100644
index 6df4fa9..0000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/model/NCEnhanceManager.scala
+++ /dev/null
@@ -1,524 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft.server.model
-
-import java.util
-import java.util.concurrent.atomic.{AtomicInteger, AtomicReference}
-import java.util.concurrent.{ConcurrentHashMap, CopyOnWriteArrayList, CountDownLatch, TimeUnit}
-import java.util.{List ⇒ JList}
-
-import com.google.gson.Gson
-import com.google.gson.reflect.TypeToken
-import io.opencensus.trace.Span
-import org.apache.http.HttpResponse
-import org.apache.http.client.ResponseHandler
-import org.apache.http.client.methods.HttpPost
-import org.apache.http.entity.StringEntity
-import org.apache.http.impl.client.HttpClients
-import org.apache.http.util.EntityUtils
-import org.apache.nlpcraft.common.config.NCConfigurable
-import org.apache.nlpcraft.common.makro.NCMacroParser
-import org.apache.nlpcraft.common.nlp.core.NCNlpPorterStemmer
-import org.apache.nlpcraft.common.util.NCUtils
-import org.apache.nlpcraft.common.{NCE, NCService}
-import org.apache.nlpcraft.server.mdo.NCProbeModelMdo
-import org.apache.nlpcraft.server.model.NCEnhanceType._
-import org.apache.nlpcraft.server.probe.NCProbeManager
-
-import scala.collection.JavaConverters._
-import scala.collection._
-
-/**
-  * TODO: check all texts
-  */
-object NCEnhanceManager extends NCService {
-    // 1. SUGGEST_SYNONYMS
-    // For context word server requests.
-    private final val SUGGEST_SYNONYMS_MAX_LIMIT: Int = 10000
-    private final val SUGGEST_SYNONYMS_BATCH_SIZE = 20
-
-    // For warnings.
-    private final val SUGGEST_SYNONYMS_MIN_CNT_INTENT = 5
-    private final val SUGGEST_SYNONYMS_MIN_CNT_MODEL = 20
-
-    // 2. VALIDATION_MACROS
-
-    // 3. VALIDATION_SYNONYMS
-    private final val VALIDATION_SYNONYMS_MANY_SYNS = 20000
-
-    private object Config extends NCConfigurable {
-        val urlOpt: Option[String] = getStringOpt("nlpcraft.server.ctxword.url")
-        val suggestionsMinScore: Int = getInt("nlpcraft.server.ctxword.suggestions.minScore")
-
-        @throws[NCE]
-        def check(): Unit =
-            if (suggestionsMinScore < 0 || suggestionsMinScore > 1)
-                 throw new NCE("Invalid 'nlpcraft.server.ctxword.suggestions.minScore' parameter value. It should be double value between 0 and 1, inclusive")
-    }
-
-    Config.check()
-
-    case class Suggestion(word: String, score: Double)
-    case class RequestData(sentence: String, example: String, elementId: String, index: Int)
-    case class RestRequestSentence(text: String, indexes: JList[Int])
-    case class RestRequest(sentences: JList[RestRequestSentence], limit: Int, min_score: Double)
-    case class Word(word: String, stem: String) {
-        require(!word.contains(" "), s"Word cannot contains spaces: $word")
-        require(
-            word.forall(ch ⇒
-                ch.isLetterOrDigit ||
-                    ch == '\'' ||
-                    SEPARATORS.contains(ch)
-            ),
-            s"Unsupported symbols: $word"
-        )
-    }
-
-    case class SuggestionResult(
-        synonym: String,
-        ctxWorldServerScore: Double,
-        suggestedCount: Int
-    )
-
-    case class Response(
-        errors: Option[Seq[String]] = None,
-        warnings: Option[Seq[String]] = None,
-        suggestions: Option[AnyRef] = None
-    )
-
-    private final val GSON = new Gson
-    private final val TYPE_RESP = new TypeToken[JList[JList[Suggestion]]]() {}.getType
-    private final val SEPARATORS = Seq('?', ',', '.', '-', '!')
-
-    private final val HANDLER: ResponseHandler[Seq[Seq[Suggestion]]] =
-        (resp: HttpResponse) ⇒ {
-            val code = resp.getStatusLine.getStatusCode
-            val e = resp.getEntity
-
-            val js = if (e != null) EntityUtils.toString(e) else null
-
-            if (js == null)
-                throw new RuntimeException(s"Unexpected empty response [code=$code]")
-
-            code match {
-                case 200 ⇒
-                    val data: JList[JList[Suggestion]] = GSON.fromJson(js, TYPE_RESP)
-
-                    data.asScala.map(p ⇒ if (p.isEmpty) Seq.empty else p.asScala.tail)
-
-                case 400 ⇒ throw new RuntimeException(js)
-                case _ ⇒ throw new RuntimeException(s"Unexpected response [code=$code, response=$js]")
-            }
-        }
-
-    private def split(s: String): Seq[String] = s.split(" ").toSeq.map(_.trim).filter(_.nonEmpty)
-    private def toStem(s: String): String = split(s).map(NCNlpPorterStemmer.stem).mkString(" ")
-    private def toStemWord(s: String): String = NCNlpPorterStemmer.stem(s)
-
-    /**
-      *
-      * @param seq1
-      * @param seq2
-      */
-    private def getAllSlices(seq1: Seq[String], seq2: Seq[String]): Seq[Int] = {
-        val seq = mutable.Buffer.empty[Int]
-
-        var i = seq1.indexOfSlice(seq2)
-
-        while (i >= 0) {
-            seq += i
-
-            i = seq1.indexOfSlice(seq2, i + 1)
-        }
-
-        seq
-    }
-
-    /**
-      *
-      * @param typ
-      * @param resp
-      */
-    private def convert(typ: NCEnhanceType, resp: Response): NCEnhanceElement =
-        NCEnhanceElement(typ, resp.errors, resp.warnings, resp.suggestions)
-
-    /**
-      *
-      * @param seq
-      */
-    private def norm(seq: Seq[String]): Option[Seq[String]] = if (seq.isEmpty) None else Some(seq)
-
-    /**
-      *
-      * @param mdl
-      */
-    private def prepareParser(mdl: NCProbeModelMdo): NCMacroParser = {
-        val parser = new NCMacroParser()
-
-        mdl.macros.foreach { case (name, str) ⇒ parser.addMacro(name, str) }
-
-        parser
-    }
-
-    /**
-      *
-      * @param mdlId
-      * @param parent
-      */
-    @throws[NCE]
-    private def suggestSynonyms(mdlId: String, parent: Span = null): Response =
-        startScopedSpan("suggestSynonyms", parent, "modelId" → mdlId) { _ ⇒
-            val url = s"${Config.urlOpt.getOrElse(throw new NCE("Context word server is not configured"))}/suggestions"
-
-            val mdl = NCProbeManager.getModel(mdlId)
-
-            require(mdl.intentsSamples != null, "Samples cannot be null")
-            require(mdl.elementsSynonyms != null, "Element synonyms cannot be null")
-            require(mdl.macros != null, "Macros cannot be null")
-
-            val allSamplesCnt = mdl.intentsSamples.map { case (_, samples) ⇒ samples.size }.sum
-
-            val warns = mutable.ArrayBuffer.empty[String]
-
-            if (allSamplesCnt < SUGGEST_SYNONYMS_MIN_CNT_MODEL)
-                warns +=
-                    s"Model: '$mdlId' has too small intents samples count: $allSamplesCnt. " +
-                    s"Potentially is can be not enough for suggestions service high quality work. " +
-                    s"Try to increase their count at least to $SUGGEST_SYNONYMS_MIN_CNT_MODEL."
-
-            else {
-                val ids =
-                    mdl.intentsSamples.
-                        filter { case (_, samples) ⇒ samples.size < SUGGEST_SYNONYMS_MIN_CNT_INTENT }.
-                        map { case (intentId, _) ⇒ intentId }
-
-                if (ids.nonEmpty)
-                    warns +=
-                        s"Models '$mdlId' has intents: [${ids.mkString(", ")}] with too small intents samples count." +
-                            s"Potentially it can be not enough for suggestions service high quality work. " +
-                            s"Try to increase their count at least to $SUGGEST_SYNONYMS_MIN_CNT_INTENT."
-            }
-
-            val parser = prepareParser(mdl)
-
-            // Note that we don't use system tokenizer, because ContextWordServer doesn't have this tokenizer.
-            // We just split examples words with spaces. Also we divide SEPARATORS as separated words.
-            val examples =
-                mdl.
-                    intentsSamples.
-                    flatMap { case (_, samples) ⇒ samples }.
-                    map(ex ⇒ SEPARATORS.foldLeft(ex)((s, ch) ⇒ s.replaceAll(s"\\$ch", s" $ch "))).
-                    map(ex ⇒ {
-                        val seq = ex.split(" ")
-
-                        seq → seq.map(toStemWord)
-                    }).
-                    toMap
-
-            val elemSyns =
-                mdl.elementsSynonyms.map { case (elemId, syns) ⇒ elemId → syns.flatMap(parser.expand) }.
-                    map { case (id, seq) ⇒ id → seq.map(txt ⇒ split(txt).map(p ⇒ Word(p, toStemWord(p)))) }
-
-            val allReqs =
-                elemSyns.map {
-                    case (elemId, syns) ⇒
-                        val normSyns: Seq[Seq[Word]] = syns.filter(_.size == 1)
-                        val synsStems = normSyns.map(_.map(_.stem))
-                        val synsWords = normSyns.map(_.map(_.word))
-
-                        val reqs =
-                            examples.flatMap { case (exampleWords, exampleStems) ⇒
-                                val exampleIdxs = synsStems.flatMap(synStems ⇒ getAllSlices(exampleStems, synStems))
-
-                                def mkRequestData(idx: Int, synStems: Seq[String], synStemsIdx: Int): RequestData = {
-                                    val fromIncl = idx
-                                    val toExcl = idx + synStems.length
-
-                                    RequestData(
-                                        sentence = exampleWords.zipWithIndex.flatMap {
-                                            case (exampleWord, i) ⇒
-                                                i match {
-                                                    case x if x == fromIncl ⇒ synsWords(synStemsIdx)
-                                                    case x if x > fromIncl && x < toExcl ⇒ Seq.empty
-                                                    case _ ⇒ Seq(exampleWord)
-                                                }
-                                        }.mkString(" "),
-                                        example = exampleWords.mkString(" "),
-                                        elementId = elemId,
-                                        index = idx
-                                    )
-                                }
-
-                                (for (idx ← exampleIdxs; (synStems, i) ← synsStems.zipWithIndex)
-                                    yield mkRequestData(idx, synStems, i)).distinct
-                            }
-
-                        elemId → reqs.toSet
-                }.filter(_._2.nonEmpty)
-
-            val noExElems =
-                mdl.elementsSynonyms.
-                    filter { case (elemId, syns) ⇒ syns.nonEmpty && !allReqs.contains(elemId) }.
-                    map { case (elemId, _) ⇒ elemId }
-
-            if (noExElems.nonEmpty)
-                warns +=
-                    "Some elements don't have synonyms in intent samples, " +
-                    s"so the service can't suggest any new synonyms for such elements: [${noExElems.mkString(", ")}]"
-
-            val allReqsCnt = allReqs.map(_._2.size).sum
-            val allSynsCnt = elemSyns.map(_._2.size).sum
-
-            logger.info(s"Data prepared [examples=${examples.size}, synonyms=$allSynsCnt, requests=$allReqsCnt]")
-
-            val allSuggs = new ConcurrentHashMap[String, JList[Suggestion]]()
-            val cdl = new CountDownLatch(1)
-            val debugs = mutable.HashMap.empty[RequestData, Seq[Suggestion]]
-            val cnt = new AtomicInteger(0)
-
-            val client = HttpClients.createDefault
-            val err = new AtomicReference[Throwable]()
-
-            for ((elemId, reqs) ← allReqs; batch ← reqs.sliding(SUGGEST_SYNONYMS_BATCH_SIZE, SUGGEST_SYNONYMS_BATCH_SIZE).map(_.toSeq)) {
-                NCUtils.asFuture(
-                    _ ⇒ {
-                        val post = new HttpPost(url)
-
-                        post.setHeader("Content-Type", "application/json")
-
-                        post.setEntity(
-                            new StringEntity(
-                                GSON.toJson(
-                                    RestRequest(
-                                        sentences = batch.map(p ⇒ RestRequestSentence(p.sentence, Seq(p.index).asJava)).asJava,
-                                        // ContextWord server range is (0, 2), input range is (0, 1)
-                                        min_score = Config.suggestionsMinScore * 2,
-                                        // We set big limit value and in fact only minimal score is taken into account.
-                                        limit = SUGGEST_SYNONYMS_MAX_LIMIT
-                                    )
-                                ),
-                                "UTF-8"
-                            )
-                        )
-
-                        val resps: Seq[Seq[Suggestion]] =
-                            try
-                                client.execute(post, HANDLER)
-                            finally
-                                post.releaseConnection()
-
-                        require(batch.size == resps.size, s"Batch: ${batch.size}, responses: ${resps.size}")
-
-                        batch.zip(resps).foreach { case (req, resp) ⇒ debugs += req → resp }
-
-                        val i = cnt.addAndGet(batch.size)
-
-                        logger.info(s"Executed: $i requests...")
-
-                        allSuggs.
-                            computeIfAbsent(elemId, (_: String) ⇒ new CopyOnWriteArrayList[Suggestion]()).
-                            addAll(resps.flatten.asJava)
-
-                        if (i == allReqsCnt)
-                            cdl.countDown()
-                    },
-                    (e: Throwable) ⇒ {
-                        err.compareAndSet(null, e)
-
-                        cdl.countDown()
-                    },
-                    (_: Unit) ⇒ ()
-                )
-            }
-
-            cdl.await(Long.MaxValue, TimeUnit.MILLISECONDS)
-
-            if (err.get() != null)
-                throw new NCE("Error during work with ContextWord Server", err.get())
-
-            val allSynsStems = elemSyns.flatMap(_._2).flatten.map(_.stem).toSet
-
-            val nonEmptySuggs = allSuggs.asScala.map(p ⇒ p._1 → p._2.asScala).filter(_._2.nonEmpty)
-
-            val res = mutable.HashMap.empty[String, mutable.ArrayBuffer[SuggestionResult]]
-
-            nonEmptySuggs.
-                foreach { case (elemId, elemSuggs) ⇒
-                    elemSuggs.
-                        map(sugg ⇒ (sugg, toStem(sugg.word))).
-                        groupBy { case (_, stem) ⇒ stem }.
-                        // Drops already defined.
-                        filter { case (stem, _) ⇒ !allSynsStems.contains(stem) }.
-                        map { case (_, group) ⇒
-                            val seq = group.map { case (sugg, _) ⇒ sugg }.sortBy(-_.score)
-
-                            // Drops repeated.
-                            (seq.head, seq.length)
-                        }.
-                        toSeq.
-                        map { case (sugg, cnt) ⇒ (sugg, cnt, sugg.score * cnt / elemSuggs.size) }.
-                        sortBy { case (_, _, sumFactor) ⇒ -sumFactor }.
-                        zipWithIndex.
-                        foreach { case ((sugg, cnt, _), _) ⇒
-                            val seq =
-                                res.get(elemId) match {
-                                    case Some(seq) ⇒ seq
-                                    case None ⇒
-                                        val buf = mutable.ArrayBuffer.empty[SuggestionResult]
-
-                                        res += elemId → buf
-
-                                        buf
-                                }
-
-                            seq += SuggestionResult(sugg.word, sugg.score, cnt)
-                        }
-                }
-
-            logger.whenInfoEnabled({
-                var i = 1
-
-                debugs.groupBy(_._1.example).foreach { case (_, m) ⇒
-                    m.toSeq.sortBy(_._1.sentence).foreach { case (req, suggs) ⇒
-                        val s =
-                            split(req.sentence).
-                                zipWithIndex.map { case (w, i) ⇒ if (i == 
req.index) s"<<<$w>>>" else w }.
-                                mkString(" ")
-
-                        logger.info(
-                            s"$i. " +
-                                s"Request=$s, " +
-                                s"suggestions=[${suggs.map(_.word).mkString(", 
")}], " +
-                                s"element=${req.elementId}"
-                        )
-
-                        i = i + 1
-                    }
-                }
-            })
-
-            Response(
-                warnings = norm(warns),
-                suggestions = Some(
-                    res.map { case (id, data) ⇒
-                        id → data.map(d ⇒ {
-                            val m = new util.HashMap[String, Any]()
-
-                            m.put("synonym", d.synonym)
-                            m.put("ctxWorldServerScore", d.ctxWorldServerScore)
-                            m.put("suggestedCount", d.suggestedCount)
-
-                            m
-                        }).asJava
-                    }.asJava
-                )
-            )
-        }
-
-    /**
-      *
-      * @param mdlId
-      * @param parent
-      */
-    private def validateMacros(mdlId: String, parent: Span = null): Response =
-        startScopedSpan("validateMacros", parent, "modelId" → mdlId) { _ ⇒
-            val mdl = NCProbeManager.getModel(mdlId)
-            val syns = mdl.elementsSynonyms.values.flatten
-
-            Response(warnings =
-                norm(
-                    mdl.macros.keys.
-                    // TODO: is it valid check?
-                    flatMap(m ⇒ if (syns.exists(_.contains(m))) None else 
Some(s"Macro is not used: $m")).
-                    toSeq
-                )
-            )
-    }
-
-    /**
-      *
-      * @param mdlId
-      * @param parent
-      */
-    private def validateSynonyms(mdlId: String, parent: Span = null): Response 
=
-        startScopedSpan("validateSynonyms", parent, "modelId" → mdlId) { _ ⇒
-            val warns = mutable.ArrayBuffer.empty[String]
-
-            val mdl = NCProbeManager.getModel(mdlId)
-
-            val parser = prepareParser(mdl)
-
-            val mdlSyns = mdl.elementsSynonyms.map { case (elemId, syns) ⇒ 
elemId → syns.flatMap(parser.expand) }
-
-            mdlSyns.foreach { case (elemId, syns) ⇒
-                val size = syns.size
-
-                if (size == 0)
-                    warns += s"Element: '$elemId' doesn't have synonyms"
-                else if (size > VALIDATION_SYNONYMS_MANY_SYNS)
-                    warns += s"Element: '$elemId' has too many synonyms: $size"
-
-                val others = mdlSyns.filter { case (othId, _) ⇒ othId != 
elemId}
-
-                val intersects = others.filter { case (_, othSyns) ⇒ 
othSyns.intersect(syns).nonEmpty }.keys.mkString(",")
-
-                if (intersects.nonEmpty)
-                    warns += s"Element: '$elemId' has same synonyms with 
'$intersects'"
-            }
-
-            Response(warnings = norm(warns))
-        }
-
-    /**
-      *
-      * @param mdlId
-      * @param parent
-      */
-    private def validateIntents(mdlId: String, parent: Span = null): Response =
-        startScopedSpan("validateIntents", parent, "modelId" → mdlId) { _ ⇒
-            val mdl = NCProbeManager.getModel(mdlId)
-            val syns = mdl.elementsSynonyms.values.flatten
-
-            Response(warnings =
-                norm(
-                    mdl.macros.keys.
-                        // TODO: is it valid check?
-                        flatMap(m ⇒ if (syns.exists(_.contains(m))) None else 
Some(s"Macro is not used: $m")).
-                        toSeq
-                )
-            )
-        }
-
-
-    /**
-      *
-      * @param mdlId
-      * @param types
-      * @param parent
-      */
-    @throws[NCE]
-    def enhance(mdlId: String, types: Seq[NCEnhanceType], parent: Span = 
null): Seq[NCEnhanceElement] =
-        startScopedSpan("enhance", parent, "modelId" → mdlId) { _ ⇒
-            types.map {
-                case t@SUGGEST_SYNONYMS ⇒ convert(t, suggestSynonyms(mdlId, 
parent))
-                case t@VALIDATION_MACROS ⇒ convert(t, validateMacros(mdlId, 
parent))
-                case t@VALIDATION_SYNONYMS ⇒ convert(t, 
validateSynonyms(mdlId, parent))
-                case t@VALIDATION_INTENTS ⇒ convert(t, validateIntents(mdlId, 
parent))
-            }
-        }
-}
diff --git 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/model/NCServerInspectorManager.scala
 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/model/NCServerInspectorManager.scala
new file mode 100644
index 0000000..230be8d
--- /dev/null
+++ 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/model/NCServerInspectorManager.scala
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft.server.model
+
+import io.opencensus.trace.Span
+import org.apache.nlpcraft.common.inspections.NCInspectionType._
+import org.apache.nlpcraft.common.inspections.{NCInspection, NCInspectionType}
+import org.apache.nlpcraft.common.{NCE, NCService}
+import org.apache.nlpcraft.server.model.inspectors._
+import org.apache.nlpcraft.server.probe.NCProbeManager
+
+import scala.collection._
+import scala.concurrent.ExecutionContext.Implicits.global
+import scala.concurrent.{Future, Promise}
+import scala.util.{Failure, Success}
+
+/**
+  * TODO: check all texts
+  */
+object NCServerInspectorManager extends NCService {
+    private final val INSPECTORS =
+        Map(
+            SUGGEST_SYNONYMS → NCInspectorSynonymsSuggestions
+        )
+
+    override def start(parent: Span): NCService = startScopedSpan("start", 
parent) { _ ⇒
+        INSPECTORS.values.foreach(_.start())
+
+        super.start(parent)
+    }
+
+    override def stop(parent: Span): Unit = startScopedSpan("stop", parent) { 
_ ⇒
+        super.stop()
+
+        INSPECTORS.values.foreach(_.stop())
+    }
+
+    /**
+      *
+      * @param mdlId
+      * @param types
+      * @param parent
+      */
+    @throws[NCE]
+    def inspect(mdlId: String, types: Seq[NCInspectionType], parent: Span = 
null): Future[Map[NCInspectionType, NCInspection]] =
+        startScopedSpan("inspect", parent, "modelId" → mdlId, "types" → types.map(_.toString)) { _ ⇒
+            val promise = Promise[Map[NCInspectionType, NCInspection]]()
+
+            NCProbeManager.inspect(mdlId, types, parent).onComplete {
+                case Success(map) ⇒
+                    val merged = map.map { case (typ, inspectionProbe) ⇒
+                        val inspectionSrv = INSPECTORS.get(typ) match {
+                            case Some(inspector) ⇒ inspector.inspect(mdlId, 
inspectionProbe.data)
+                            case None ⇒ NCInspection()
+                        }
+
+                        def union[T](seq1: Option[Seq[T]], seq2: 
Option[Seq[T]]): Option[Seq[T]] = {
+                            val seq = seq1.getOrElse(Seq.empty) ++ 
seq2.getOrElse(Seq.empty)
+
+                            if (seq.isEmpty) None else Some(seq)
+                        }
+
+                        typ → NCInspection(
+                            errors = union(inspectionProbe.errors, 
inspectionSrv.errors),
+                            warnings = union(inspectionProbe.warnings, 
inspectionSrv.warnings),
+                            suggestions = union(inspectionProbe.suggestions, 
inspectionSrv.suggestions),
+                            // No need to pass this data on the final step.
+                            data = None
+                        )
+                    }
+                    promise.success(merged)
+                case Failure(err) ⇒ promise.failure(err)
+            }(global)
+
+            promise.future
+        }
+}
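
For reference, a minimal sketch of how a caller might consume the new NCServerInspectorManager.inspect() call; the model ID is hypothetical and the server-side services are assumed to be started already:

    import org.apache.nlpcraft.common.inspections.NCInspectionType._
    import org.apache.nlpcraft.server.model.NCServerInspectorManager
    import scala.concurrent.ExecutionContext.Implicits.global
    import scala.util.{Failure, Success}

    // Hypothetical model ID used only for illustration.
    NCServerInspectorManager.inspect("my.model.id", Seq(SUGGEST_SYNONYMS)).onComplete {
        case Success(res) ⇒
            res.foreach { case (typ, insp) ⇒
                // Probe-side and server-side results are already merged here.
                println(s"$typ: " +
                    s"errors=${insp.errors.getOrElse(Seq.empty).size}, " +
                    s"warnings=${insp.warnings.getOrElse(Seq.empty).size}, " +
                    s"suggestions=${insp.suggestions.getOrElse(Seq.empty).size}")
            }
        case Failure(e) ⇒ e.printStackTrace()
    }
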
diff --git 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/model/inspectors/NCInspectorSynonymsSuggestions.scala
 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/model/inspectors/NCInspectorSynonymsSuggestions.scala
new file mode 100644
index 0000000..46f0f2a
--- /dev/null
+++ 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/model/inspectors/NCInspectorSynonymsSuggestions.scala
@@ -0,0 +1,401 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft.server.model.inspectors
+
+import java.util
+import java.util.concurrent.atomic.{AtomicInteger, AtomicReference}
+import java.util.concurrent.{ConcurrentHashMap, CopyOnWriteArrayList, 
CountDownLatch, TimeUnit}
+import java.util.{List => JList}
+
+import com.google.gson.Gson
+import com.google.gson.reflect.TypeToken
+import io.opencensus.trace.Span
+import org.apache.http.HttpResponse
+import org.apache.http.client.ResponseHandler
+import org.apache.http.client.methods.HttpPost
+import org.apache.http.entity.StringEntity
+import org.apache.http.impl.client.HttpClients
+import org.apache.http.util.EntityUtils
+import org.apache.nlpcraft.common.config.NCConfigurable
+import org.apache.nlpcraft.common.inspections.{NCInspection, NCInspector}
+import org.apache.nlpcraft.common.makro.NCMacroParser
+import org.apache.nlpcraft.common.nlp.core.NCNlpPorterStemmer
+import org.apache.nlpcraft.common.util.NCUtils
+import org.apache.nlpcraft.common.{NCE, NCService}
+
+import scala.collection.JavaConverters._
+import scala.collection.{Seq, mutable}
+import scala.concurrent.ExecutionContext.Implicits.global
+
+object NCInspectorSynonymsSuggestions extends NCService with NCInspector {
+    // For context word server requests.
+    private final val MAX_LIMIT: Int = 10000
+    private final val BATCH_SIZE = 20
+
+    // For warnings.
+    private final val MIN_CNT_INTENT = 5
+    private final val MIN_CNT_MODEL = 20
+
+    private object Config extends NCConfigurable {
+        val urlOpt: Option[String] = 
getStringOpt("nlpcraft.server.ctxword.url")
+        val suggestionsMinScore: Int = 
getInt("nlpcraft.server.ctxword.suggestions.minScore")
+
+        @throws[NCE]
+        def check(): Unit =
+            if (suggestionsMinScore < 0 || suggestionsMinScore > 1)
+                throw new NCE("Invalid 'nlpcraft.server.ctxword.suggestions.minScore' parameter value. It must be a value between 0 and 1, inclusive.")
+    }
+
+    Config.check()
+
+    case class Suggestion(word: String, score: Double)
+    case class RequestData(sentence: String, example: String, elementId: 
String, index: Int)
+    case class RestRequestSentence(text: String, indexes: JList[Int])
+    case class RestRequest(sentences: JList[RestRequestSentence], limit: Int, 
min_score: Double)
+    case class Word(word: String, stem: String) {
+        require(!word.contains(" "), s"Word cannot contain spaces: $word")
+        require(
+            word.forall(ch ⇒
+                ch.isLetterOrDigit ||
+                    ch == '\'' ||
+                    SEPARATORS.contains(ch)
+            ),
+            s"Unsupported symbols: $word"
+        )
+    }
+
+    case class SuggestionResult(
+        synonym: String,
+        ctxWorldServerScore: Double,
+        suggestedCount: Int
+    )
+
+    private final val GSON = new Gson
+    private final val TYPE_RESP = new TypeToken[JList[JList[Suggestion]]]() 
{}.getType
+    private final val SEPARATORS = Seq('?', ',', '.', '-', '!')
+
+    private final val HANDLER: ResponseHandler[Seq[Seq[Suggestion]]] =
+        (resp: HttpResponse) ⇒ {
+            val code = resp.getStatusLine.getStatusCode
+            val e = resp.getEntity
+
+            val js = if (e != null) EntityUtils.toString(e) else null
+
+            if (js == null)
+                throw new RuntimeException(s"Unexpected empty response 
[code=$code]")
+
+            code match {
+                case 200 ⇒
+                    val data: JList[JList[Suggestion]] = GSON.fromJson(js, 
TYPE_RESP)
+
+                    data.asScala.map(p ⇒ if (p.isEmpty) Seq.empty else 
p.asScala.tail)
+
+                case 400 ⇒ throw new RuntimeException(js)
+                case _ ⇒ throw new RuntimeException(s"Unexpected response 
[code=$code, response=$js]")
+            }
+        }
+
+    private def split(s: String): Seq[String] = s.split(" 
").toSeq.map(_.trim).filter(_.nonEmpty)
+    private def toStem(s: String): String = 
split(s).map(NCNlpPorterStemmer.stem).mkString(" ")
+    private def toStemWord(s: String): String = NCNlpPorterStemmer.stem(s)
+
+    /**
+      *
+      * @param seq1
+      * @param seq2
+      */
+    private def getAllSlices(seq1: Seq[String], seq2: Seq[String]): Seq[Int] = 
{
+        val seq = mutable.Buffer.empty[Int]
+
+        var i = seq1.indexOfSlice(seq2)
+
+        while (i >= 0) {
+            seq += i
+
+            i = seq1.indexOfSlice(seq2, i + 1)
+        }
+
+        seq
+    }
+
+    override def inspect(mdlId: String, data: Option[AnyRef], parent: Span = 
null): NCInspection =
+        startScopedSpan("inspect", parent) { _ ⇒
+            val m: util.Map[String, AnyRef] =
+                data.
+                    getOrElse(throw new NCE(s"Missing suggestions data for model: $mdlId")).
+                    asInstanceOf[util.Map[String, AnyRef]]
+
+            val macrosJ = m.get("macros").asInstanceOf[util.Map[String, 
String]]
+            val elementsSynonymsJ = 
m.get("elementsSynonyms").asInstanceOf[util.Map[String, util.List[String]]]
+            val intentsSamplesJ = 
m.get("intentsSamples").asInstanceOf[util.Map[String, util.List[String]]]
+
+            require(macrosJ != null)
+            require(elementsSynonymsJ != null)
+            require(intentsSamplesJ != null)
+
+            val macros = macrosJ.asScala
+            val elementsSynonyms = elementsSynonymsJ.asScala.map(p ⇒ p._1 → 
p._2.asScala)
+            val intentsSamples = intentsSamplesJ.asScala.map(p ⇒ p._1 → 
p._2.asScala)
+
+            if (intentsSamples.isEmpty)
+                NCInspection()
+            else {
+                val url = s"${Config.urlOpt.getOrElse(throw new NCE("Context 
word server is not configured"))}/suggestions"
+
+                val allSamplesCnt = intentsSamples.map { case (_, samples) ⇒ 
samples.size }.sum
+
+                val warns = mutable.ArrayBuffer.empty[String]
+
+                if (allSamplesCnt < MIN_CNT_MODEL)
+                    warns +=
+                        s"Model: '$mdlId' has too few intent samples: $allSamplesCnt. " +
+                            s"This may not be enough for the suggestion service to work well. " +
+                            s"Try to increase their count to at least $MIN_CNT_MODEL."
+
+                else {
+                    val ids =
+                        intentsSamples.
+                            filter { case (_, samples) ⇒ samples.size < 
MIN_CNT_INTENT }.
+                            map { case (intentId, _) ⇒ intentId }
+
+                    if (ids.nonEmpty)
+                        warns +=
+                            s"Model '$mdlId' has intents [${ids.mkString(", ")}] with too few samples. " +
+                                s"This may not be enough for the suggestion service to work well. " +
+                                s"Try to increase their count to at least $MIN_CNT_INTENT."
+                }
+
+                val parser = new NCMacroParser()
+
+                macros.foreach { case (name, str) ⇒ parser.addMacro(name, str) 
}
+
+                // Note that we don't use the system tokenizer because the ContextWord server doesn't have it.
+                // We simply split the example text on spaces and treat SEPARATORS as separate words.
+                val examples =
+                intentsSamples.
+                    flatMap { case (_, samples) ⇒ samples }.
+                    map(ex ⇒ SEPARATORS.foldLeft(ex)((s, ch) ⇒ 
s.replaceAll(s"\\$ch", s" $ch "))).
+                    map(ex ⇒ {
+                        val seq = ex.split(" ")
+
+                        seq → seq.map(toStemWord)
+                    }).
+                    toMap
+
+                val elemSyns =
+                    elementsSynonyms.map { case (elemId, syns) ⇒ elemId → 
syns.flatMap(parser.expand) }.
+                        map { case (id, seq) ⇒ id → seq.map(txt ⇒ 
split(txt).map(p ⇒ Word(p, toStemWord(p)))) }
+
+                val allReqs =
+                    elemSyns.map {
+                        case (elemId, syns) ⇒
+                            val normSyns: Seq[Seq[Word]] = syns.filter(_.size 
== 1)
+                            val synsStems = normSyns.map(_.map(_.stem))
+                            val synsWords = normSyns.map(_.map(_.word))
+
+                            val reqs =
+                                examples.flatMap { case (exampleWords, 
exampleStems) ⇒
+                                    val exampleIdxs = 
synsStems.flatMap(synStems ⇒ getAllSlices(exampleStems, synStems))
+
+                                    def mkRequestData(idx: Int, synStems: 
Seq[String], synStemsIdx: Int): RequestData = {
+                                        val fromIncl = idx
+                                        val toExcl = idx + synStems.length
+
+                                        RequestData(
+                                            sentence = 
exampleWords.zipWithIndex.flatMap {
+                                                case (exampleWord, i) ⇒
+                                                    i match {
+                                                        case x if x == 
fromIncl ⇒ synsWords(synStemsIdx)
+                                                        case x if x > fromIncl 
&& x < toExcl ⇒ Seq.empty
+                                                        case _ ⇒ 
Seq(exampleWord)
+                                                    }
+                                            }.mkString(" "),
+                                            example = exampleWords.mkString(" 
"),
+                                            elementId = elemId,
+                                            index = idx
+                                        )
+                                    }
+
+                                    (for (idx ← exampleIdxs; (synStems, i) ← 
synsStems.zipWithIndex)
+                                        yield mkRequestData(idx, synStems, 
i)).distinct
+                                }
+
+                            elemId → reqs.toSet
+                    }.filter(_._2.nonEmpty)
+
+                val noExElems =
+                    elementsSynonyms.
+                        filter { case (elemId, syns) ⇒ syns.nonEmpty && 
!allReqs.contains(elemId) }.
+                        map { case (elemId, _) ⇒ elemId }
+
+                if (noExElems.nonEmpty)
+                    warns +=
+                        "Some elements have no synonyms that occur in the intent samples, " +
+                            s"so the service can't suggest any new synonyms for them: [${noExElems.mkString(", ")}]"
+
+                val allReqsCnt = allReqs.map(_._2.size).sum
+                val allSynsCnt = elemSyns.map(_._2.size).sum
+
+                logger.info(s"Data prepared [examples=${examples.size}, 
synonyms=$allSynsCnt, requests=$allReqsCnt]")
+
+                val allSuggs = new ConcurrentHashMap[String, 
JList[Suggestion]]()
+                val cdl = new CountDownLatch(1)
+                val debugs = mutable.HashMap.empty[RequestData, 
Seq[Suggestion]]
+                val cnt = new AtomicInteger(0)
+
+                val client = HttpClients.createDefault
+                val err = new AtomicReference[Throwable]()
+
+                for ((elemId, reqs) ← allReqs; batch ← 
reqs.sliding(BATCH_SIZE, BATCH_SIZE).map(_.toSeq)) {
+                    NCUtils.asFuture(
+                        _ ⇒ {
+                            val post = new HttpPost(url)
+
+                            post.setHeader("Content-Type", "application/json")
+
+                            post.setEntity(
+                                new StringEntity(
+                                    GSON.toJson(
+                                        RestRequest(
+                                            sentences = batch.map(p ⇒ 
RestRequestSentence(p.sentence, Seq(p.index).asJava)).asJava,
+                                            // ContextWord server range is (0, 
2), input range is (0, 1)
+                                            min_score = 
Config.suggestionsMinScore * 2,
+                                            // We set a large limit; in practice only the minimal score is taken into account.
+                                            limit = MAX_LIMIT
+                                        )
+                                    ),
+                                    "UTF-8"
+                                )
+                            )
+
+                            val resps: Seq[Seq[Suggestion]] =
+                                try
+                                    client.execute(post, HANDLER)
+                                finally
+                                    post.releaseConnection()
+
+                            require(batch.size == resps.size, s"Batch: 
${batch.size}, responses: ${resps.size}")
+
+                            batch.zip(resps).foreach { case (req, resp) ⇒ 
debugs += req → resp }
+
+                            val i = cnt.addAndGet(batch.size)
+
+                            logger.info(s"Executed: $i requests...")
+
+                            allSuggs.
+                                computeIfAbsent(elemId, (_: String) ⇒ new 
CopyOnWriteArrayList[Suggestion]()).
+                                addAll(resps.flatten.asJava)
+
+                            if (i == allReqsCnt)
+                                cdl.countDown()
+                        },
+                        (e: Throwable) ⇒ {
+                            err.compareAndSet(null, e)
+
+                            cdl.countDown()
+                        },
+                        (_: Unit) ⇒ ()
+                    )
+                }
+
+                cdl.await(Long.MaxValue, TimeUnit.MILLISECONDS)
+
+                if (err.get() != null)
+                    throw new NCE("Error during work with ContextWord Server", 
err.get())
+
+                val allSynsStems = 
elemSyns.flatMap(_._2).flatten.map(_.stem).toSet
+
+                val nonEmptySuggs = allSuggs.asScala.map(p ⇒ p._1 → 
p._2.asScala).filter(_._2.nonEmpty)
+
+                val res = mutable.HashMap.empty[String, 
mutable.ArrayBuffer[SuggestionResult]]
+
+                nonEmptySuggs.
+                    foreach { case (elemId, elemSuggs) ⇒
+                        elemSuggs.
+                            map(sugg ⇒ (sugg, toStem(sugg.word))).
+                            groupBy { case (_, stem) ⇒ stem }.
+                            // Drops already defined.
+                            filter { case (stem, _) ⇒ 
!allSynsStems.contains(stem) }.
+                            map { case (_, group) ⇒
+                                val seq = group.map { case (sugg, _) ⇒ sugg 
}.sortBy(-_.score)
+
+                                // Drops repeated.
+                                (seq.head, seq.length)
+                            }.
+                            toSeq.
+                            map { case (sugg, cnt) ⇒ (sugg, cnt, sugg.score * 
cnt / elemSuggs.size) }.
+                            sortBy { case (_, _, sumFactor) ⇒ -sumFactor }.
+                            zipWithIndex.
+                            foreach { case ((sugg, cnt, _), _) ⇒
+                                val seq =
+                                    res.get(elemId) match {
+                                        case Some(seq) ⇒ seq
+                                        case None ⇒
+                                            val buf = 
mutable.ArrayBuffer.empty[SuggestionResult]
+
+                                            res += elemId → buf
+
+                                            buf
+                                    }
+
+                                seq += SuggestionResult(sugg.word, sugg.score, 
cnt)
+                            }
+                    }
+
+                logger.whenInfoEnabled({
+                    var i = 1
+
+                    debugs.groupBy(_._1.example).foreach { case (_, m) ⇒
+                        m.toSeq.sortBy(_._1.sentence).foreach { case (req, 
suggs) ⇒
+                            val s =
+                                split(req.sentence).
+                                    zipWithIndex.map { case (w, i) ⇒ if (i == 
req.index) s"<<<$w>>>" else w }.
+                                    mkString(" ")
+
+                            logger.info(
+                                s"$i. " +
+                                    s"Request=$s, " +
+                                    
s"suggestions=[${suggs.map(_.word).mkString(", ")}], " +
+                                    s"element=${req.elementId}"
+                            )
+
+                            i = i + 1
+                        }
+                    }
+                })
+
+                val resJs: util.Map[String, JList[util.HashMap[String, Any]]] =
+                    res.map { case (id, data) ⇒
+                        id → data.map(d ⇒ {
+                            val m = new util.HashMap[String, Any]()
+
+                            m.put("synonym", d.synonym)
+                            m.put("ctxWorldServerScore", d.ctxWorldServerScore)
+                            m.put("suggestedCount", d.suggestedCount)
+
+                            m
+                        }).asJava
+                    }.asJava
+
+                NCInspection(
+                    warnings = if (warns.isEmpty) None else Some(warns),
+                    suggestions = Some(Seq(resJs))
+                )
+            }
+        }
+}
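
For clarity, here is roughly the request body the inspector above posts to the ContextWord server's '/suggestions' endpoint. This is a standalone sketch reusing the same case-class shapes; the sentence, index and scores are illustrative only:

    import com.google.gson.Gson
    import java.util.{List ⇒ JList}
    import scala.collection.JavaConverters._

    // Same shapes as the inspector's request classes; values below are made up.
    case class RestRequestSentence(text: String, indexes: JList[Int])
    case class RestRequest(sentences: JList[RestRequestSentence], limit: Int, min_score: Double)

    val req = RestRequest(
        sentences = Seq(RestRequestSentence("what is the weather in moscow", Seq(3).asJava)).asJava,
        // Only 'min_score' effectively filters results; the limit is just set very high.
        limit = 10000,
        // A configured score in (0, 1) is doubled to match the server's (0, 2) range.
        min_score = 0.5 * 2
    )

    println(new Gson().toJson(req))
    // {"sentences":[{"text":"what is the weather in moscow","indexes":[3]}],"limit":10000,"min_score":1.0}
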
diff --git 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/opencensus/NCOpenCensusServerStats.scala
 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/opencensus/NCOpenCensusServerStats.scala
index 7eca741..9148289 100644
--- 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/opencensus/NCOpenCensusServerStats.scala
+++ 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/opencensus/NCOpenCensusServerStats.scala
@@ -31,7 +31,7 @@ import io.opencensus.stats._
 trait NCOpenCensusServerStats {
     val M_ASK_LATENCY_MS: MeasureLong = MeasureLong.create("ask_latency", "The 
latency of '/ask' REST call", "ms")
     val M_CHECK_LATENCY_MS: MeasureLong = MeasureLong.create("check_latency", 
"The latency of '/check' REST call", "ms")
-    val M_MODEL_ENHANCE_LATENCY_MS: MeasureLong = 
MeasureLong.create("model_enhance_latency", "The latency of '/model/enhance' 
REST call", "ms")
+    val M_MODEL_INSPECT_LATENCY_MS: MeasureLong = 
MeasureLong.create("model_inspect_latency", "The latency of '/model/inspect' 
REST call", "ms")
     val M_CANCEL_LATENCY_MS: MeasureLong = 
MeasureLong.create("cancel_latency", "The latency of '/cancel' REST call", "ms")
     val M_SIGNIN_LATENCY_MS: MeasureLong = 
MeasureLong.create("signin_latency", "The latency of '/signin' REST call", "ms")
     val M_SIGNOUT_LATENCY_MS: MeasureLong = 
MeasureLong.create("signout_latency", "The latency of '/signout' REST call", 
"ms")
diff --git 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/probe/NCProbeManager.scala 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/probe/NCProbeManager.scala
index 6fab888..525ff5a 100644
--- 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/probe/NCProbeManager.scala
+++ 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/probe/NCProbeManager.scala
@@ -14,6 +14,22 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 
 package org.apache.nlpcraft.server.probe
 
@@ -23,12 +39,16 @@ import java.security.Key
 import java.util
 import java.util.Collections
 import java.util.concurrent.atomic.AtomicBoolean
-import java.util.concurrent.{ExecutorService, Executors}
+import java.util.concurrent.{ConcurrentHashMap, ExecutorService, Executors}
 
+import com.google.gson.Gson
+import com.google.gson.reflect.TypeToken
 import io.opencensus.trace.Span
 import org.apache.nlpcraft.common.ascii.NCAsciiTable
 import org.apache.nlpcraft.common.config.NCConfigurable
 import org.apache.nlpcraft.common.crypto.NCCipher
+import org.apache.nlpcraft.common.inspections.{NCInspection, NCInspectionType}
+import org.apache.nlpcraft.common.inspections.NCInspectionType._
 import org.apache.nlpcraft.common.nlp.NCNlpSentence
 import org.apache.nlpcraft.common.nlp.core.NCNlpCoreManager
 import org.apache.nlpcraft.common.socket.NCSocket
@@ -45,13 +65,16 @@ import org.apache.nlpcraft.server.sql.NCSql
 import scala.collection.JavaConverters._
 import scala.collection.{Map, mutable}
 import scala.concurrent.ExecutionContext.Implicits.global
-import scala.concurrent.Future
+import scala.concurrent.{Future, Promise}
 import scala.util.{Failure, Success}
 
 /**
   * Probe manager.
   */
 object NCProbeManager extends NCService {
+    private final val GSON = new Gson()
+    private val TYPE_INSPECTION_RESP = new TypeToken[util.Map[String, util.Map[String, AnyRef]]]() {}.getType
+
     // Type safe and eager configuration container.
     private[probe] object Config extends NCConfigurable {
         final private val pre = "nlpcraft.server.probe"
@@ -138,7 +161,9 @@ object NCProbeManager extends NCService {
 
     @volatile private var pool: ExecutorService = _
     @volatile private var isStopping: AtomicBoolean = _
-    
+
+    @volatile private var inspections: ConcurrentHashMap[String, 
Promise[Map[NCInspectionType, NCInspection]]] = _
+
     /**
       *
       * @return
@@ -157,6 +182,8 @@ object NCProbeManager extends NCService {
         )
     
         isStopping = new AtomicBoolean(false)
+
+        inspections = new ConcurrentHashMap[String, 
Promise[Map[NCInspectionType, NCInspection]]]()
         
         pool = Executors.newFixedThreadPool(Config.poolSize)
         
@@ -195,6 +222,8 @@ object NCProbeManager extends NCService {
         U.stopThread(pingSrv)
         U.stopThread(dnSrv)
         U.stopThread(upSrv)
+
+        inspections = null
      
         super.stop()
     }
@@ -438,8 +467,8 @@ object NCProbeManager extends NCService {
                             t.interrupt()
                     
                         case e: Throwable ⇒
-                            logger.info(s"Error reading probe downlink socket 
(${e.getMessage}): $probeKey")
-                        
+                            logger.info(s"Error reading probe downlink socket 
(${e.getMessage}): $probeKey", e)
+
                             t.interrupt()
                     }
             }
@@ -579,37 +608,25 @@ object NCProbeManager extends NCService {
                             String,
                             String,
                             String,
-                            java.util.Set[String],
-                            java.util.Map[String, String],
-                            java.util.Map[String, java.util.List[String]],
-                            java.util.Map[String, java.util.List[String]]
+                            java.util.Set[String]
                         )]]("PROBE_MODELS").
                         map {
                             case (
                                 mdlId,
                                 mdlName,
                                 mdlVer,
-                                enabledBuiltInToks,
-                                macros,
-                                elementsSynonyms,
-                                intentsSamples
+                                enabledBuiltInToks
                             ) ⇒
                                 require(mdlId != null)
                                 require(mdlName != null)
                                 require(mdlVer != null)
                                 require(enabledBuiltInToks != null)
-                                require(macros != null)
-                                require(elementsSynonyms != null)
-                                require(intentsSamples != null)
 
                                 NCProbeModelMdo(
                                     id = mdlId,
                                     name = mdlName,
                                     version = mdlVer,
-                                    enabledBuiltInTokens = 
enabledBuiltInToks.asScala.toSet,
-                                    macros = macros.asScala.toMap,
-                                    elementsSynonyms = 
elementsSynonyms.asScala.map(p ⇒ p._1 → p._2.asScala).toMap,
-                                    intentsSamples = 
intentsSamples.asScala.map(p ⇒ p._1 → p._2.asScala).toMap
+                                    enabledBuiltInTokens = 
enabledBuiltInToks.asScala.toSet
                                 )
                         }.toSet
 
@@ -684,6 +701,21 @@ object NCProbeManager extends NCService {
             
             typ match {
                 case "P2S_PING" ⇒ ()
+
+                case "P2S_MODEL_INSPECTION" ⇒
+                    val promise = 
inspections.remove(probeMsg.data[String]("reqGuid"))
+
+                    if (promise != null) {
+                        val respJs: util.Map[String, util.Map[String, AnyRef]] 
=
+                            GSON.fromJson(probeMsg.data[String]("resp"), 
TYPE_INSPECTION_RESP)
+
+                        val resp =
+                            respJs.asScala.map { case (k, v) ⇒
+                                NCInspectionType.withName(k.toUpperCase) → 
NCInspection.deserialize(v)
+                            }
+
+                        promise.success(resp)
+                    }
                 
                 case "P2S_ASK_RESULT" ⇒
                     val srvReqId = probeMsg.data[String]("srvReqId")
@@ -950,7 +982,6 @@ object NCProbeManager extends NCService {
       *
       * @param mdlId Model ID.
       * @param parent Optional parent span.
-      * @return
       */
     def getModel(mdlId: String, parent: Span = null): NCProbeModelMdo =
         startScopedSpan("getModel", parent, "modelId" → mdlId) { _ ⇒
@@ -958,4 +989,25 @@ object NCProbeManager extends NCService {
                 mdls.getOrElse(mdlId, throw new NCE(s"Unknown model ID: 
$mdlId"))
             }
         }
+
+    def inspect(mdlId: String, types: Seq[NCInspectionType], parent: Span = 
null): Future[Map[NCInspectionType, NCInspection]] =
+        startScopedSpan("inspect", parent, "modelId" → mdlId, "types" → 
types.map(_.toString)) { _ ⇒
+            getProbeForModelId(mdlId) match {
+                case Some(probe) ⇒
+                    val msg = NCProbeMessage(
+                        "S2P_MODEL_INSPECTION",
+                        "mdlId" → mdlId,
+                        "types" → new 
java.util.ArrayList(types.map(_.toString).asJava)
+                    )
+
+                    val promise = Promise[Map[NCInspectionType, 
NCInspection]]()
+
+                    inspections.put(msg.getGuid, promise)
+
+                    sendToProbe(probe.probeKey, msg, parent)
+
+                    promise.future
+                case None ⇒ throw new NCE(s"Probe not found for model: $mdlId")
+            }
+        }
 }
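
The S2P_MODEL_INSPECTION / P2S_MODEL_INSPECTION exchange above is a correlate-by-GUID request/reply. A self-contained sketch of that pattern follows; the names and payloads here are illustrative and not part of the probe protocol:

    import java.util.concurrent.ConcurrentHashMap
    import scala.concurrent.duration._
    import scala.concurrent.{Await, Future, Promise}

    // Pending replies keyed by the request GUID.
    val pending = new ConcurrentHashMap[String, Promise[String]]()

    def send(guid: String): Future[String] = {
        val p = Promise[String]()

        pending.put(guid, p)
        // ... transmit the message to the peer here ...
        p.future
    }

    def onReply(guid: String, payload: String): Unit = {
        val p = pending.remove(guid)

        // Late or unknown replies are simply ignored.
        if (p != null)
            p.success(payload)
    }

    // Round trip, completed locally for illustration.
    val fut = send("guid-1")

    onReply("guid-1", "{}")

    println(Await.result(fut, 5.seconds))
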
diff --git 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/rest/NCBasicRestApi.scala 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/rest/NCBasicRestApi.scala
index 5785f30..9f5f992 100644
--- 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/rest/NCBasicRestApi.scala
+++ 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/rest/NCBasicRestApi.scala
@@ -36,7 +36,7 @@ import 
org.apache.nlpcraft.server.apicodes.NCApiStatusCode.{API_OK, _}
 import org.apache.nlpcraft.server.company.NCCompanyManager
 import org.apache.nlpcraft.server.feedback.NCFeedbackManager
 import org.apache.nlpcraft.server.mdo.{NCQueryStateMdo, NCUserMdo}
-import org.apache.nlpcraft.server.model.{NCEnhanceManager, NCEnhanceType}
+import org.apache.nlpcraft.server.model.NCServerInspectorManager
 import org.apache.nlpcraft.server.opencensus.NCOpenCensusServerStats
 import org.apache.nlpcraft.server.probe.NCProbeManager
 import org.apache.nlpcraft.server.query.NCQueryManager
@@ -48,6 +48,7 @@ import scala.collection.JavaConverters._
 import scala.concurrent.ExecutionContext.Implicits.global
 import scala.concurrent.Future
 import akka.http.scaladsl.coding.Coders
+import org.apache.nlpcraft.common.inspections.NCInspectionType
 
 /**
   * REST API default implementation.
@@ -627,63 +628,53 @@ class NCBasicRestApi extends NCRestApi with LazyLogging 
with NCOpenCensusTrace w
       *
       * @return
       */
-    protected def modelEnhance$(): Route = {
-        case class Req(
-            acsTok: String,
-            mdlId: String,
-            types: Seq[String]
-        )
-
-        implicit val reqFmt: RootJsonFormat[Req] = jsonFormat3(Req)
-
-        entity(as[Req]) { req ⇒
-            startScopedSpan("modelEnhance$", "mdlId" → req.mdlId, "acsTok" → 
req.acsTok) { span ⇒
-                checkLength("acsTok", req.acsTok, 256)
-                checkLength("mdlId", req.mdlId, 32)
-
-                val types =
-                    if (req.types.size == 1 && req.types.head.toLowerCase == 
"all")
-                        NCEnhanceType.values.toSeq
-                    else
-                        req.types.map(typ ⇒
-                            try
-                                NCEnhanceType.withName(typ.toUpperCase)
-                            catch {
-                                case _: Exception ⇒ throw InvalidField("types")
-                            }
-                        )
-
-                val admin = authenticateAsAdmin(req.acsTok)
+    protected def inspect$(reqJs: JsValue): Future[String] = {
+        val obj = reqJs.asJsObject()
 
-                if (!NCProbeManager.getAllProbes(admin.companyId, 
span).exists(_.models.exists(_.id == req.mdlId)))
-                    throw new NCE(s"Probe not found for model: ${req.mdlId}")
+        def getOpt[T](name: String, convert: JsValue ⇒ T): Option[T] =
+            obj.fields.get(name) match {
+                case Some(v) ⇒ Some(convert(v))
+                case None ⇒ None
+            }
 
-                val res =
-                    NCEnhanceManager.
-                        enhance(req.mdlId, types, span).
-                        map(resp ⇒ {
-                            // We don't use internal case class here because 
GSON can use only public classes.
-                            // So, we use HashMap.
-                            val m = new util.HashMap[String, Object]()
+        val acsTok = obj.fields("acsTok").convertTo[String]
+        val mdlId = obj.fields("mdlId").convertTo[String]
+        val types = obj.fields("types").convertTo[Seq[String]]
 
-                            m.put("enhanceType", resp.enhanceType.toString)
+        startScopedSpan("inspect$", "mdlId" → mdlId, "acsTok" → acsTok) { span ⇒
+            checkLength("acsTok", acsTok, 256)
+            checkLength("mdlId", mdlId, 32)
 
-                            if (resp.errors.isDefined)
-                                m.put("errors", resp.errors.get.asJava)
-                            if (resp.warnings.isDefined)
-                                m.put("warnings", resp.warnings.get.asJava)
-                            if (resp.suggestions.isDefined)
-                                m.put("suggestions", resp.suggestions.get)
+            val typesVals =
+                if (types.size == 1 && types.head.toLowerCase == "all")
+                    NCInspectionType.values.toSeq
+                else
+                    types.map(typ ⇒
+                        try
+                            NCInspectionType.withName(typ.toUpperCase)
+                        catch {
+                            case _: Exception ⇒ throw InvalidField("types")
+                        }
+                    )
 
-                            m
+            val admin = authenticateAsAdmin(acsTok)
 
-                        }).asJava
+            if (!NCProbeManager.getAllProbes(admin.companyId, 
span).exists(_.models.exists(_.id == mdlId)))
+                throw new NCE(s"Probe not found for model: $mdlId")
 
-                complete(
-                    HttpResponse(
-                        entity = HttpEntity(ContentTypes.`application/json`, 
GSON.toJson(res))
+            NCServerInspectorManager.
+                inspect(mdlId, typesVals, span).collect {
+                    // We have to use GSON (not spray) here to serialize 
`result` field.
+                    case res ⇒
+                        val m = new util.HashMap[String, AnyRef](
+                            res.map { case (typ, inspection) ⇒ typ.toString → 
inspection.serialize() }.asJava
+                        )
+                        GSON.toJson(
+                            Map(
+                                "status" → API_OK.toString,
+                                "result" → m
+                            ).asJava
                     )
-                )
             }
         }
     }
@@ -1795,7 +1786,6 @@ class NCBasicRestApi extends NCRestApi with LazyLogging 
with NCOpenCensusTrace w
                                 path(API / "signout") { 
withLatency(M_SIGNOUT_LATENCY_MS, signout$) } ~ {
                                 path(API / "cancel") { 
withLatency(M_CANCEL_LATENCY_MS, cancel$) } ~
                                 path(API / "check") { 
withLatency(M_CHECK_LATENCY_MS, check$) } ~
-                                path(API / "model"/ "enhance") { 
withLatency(M_MODEL_ENHANCE_LATENCY_MS, modelEnhance$) } ~
                                 path(API / "clear"/ "conversation") { 
withLatency(M_CLEAR_CONV_LATENCY_MS, clear$Conversation) } ~
                                 path(API / "clear"/ "dialog") { 
withLatency(M_CLEAR_DIALOG_LATENCY_MS, clear$Dialog) } ~
                                 path(API / "company"/ "add") { 
withLatency(M_COMPANY_ADD_LATENCY_MS, company$Add) } ~
@@ -1815,6 +1805,14 @@ class NCBasicRestApi extends NCRestApi with LazyLogging 
with NCOpenCensusTrace w
                                 path(API / "feedback" / "delete") { 
withLatency(M_FEEDBACK_DELETE_LATENCY_MS, feedback$Delete) } ~
                                 path(API / "probe" / "all") { 
withLatency(M_PROBE_ALL_LATENCY_MS, probe$All) } ~
                                 path(API / "ask") { 
withLatency(M_ASK_LATENCY_MS, ask$) } ~
+                                (path(API / "model" / "inspect") &
+                                    entity(as[JsValue])
+                                ) {
+                                    req ⇒
+                                        
onSuccess(withLatency(M_MODEL_INSPECT_LATENCY_MS, inspect$(req))) {
+                                            js ⇒ complete(HttpResponse(entity 
= HttpEntity(ContentTypes.`application/json`, js)))
+                                        }
+                                } ~
                                 (path(API / "ask" / "sync") &
                                     entity(as[JsValue]) &
                                     optionalHeaderValueByName("User-Agent") &

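Finally, an illustrative client call to the new '/model/inspect' route; the host, port, API prefix and access token below are assumptions and not taken from this commit:

    import java.net.URI
    import java.net.http.{HttpClient, HttpRequest, HttpResponse}

    // Body fields match the new 'inspect$' handler: acsTok, mdlId, types ("all" expands to every type).
    val body = """{"acsTok": "<access-token>", "mdlId": "my.model.id", "types": ["all"]}"""

    val req = HttpRequest.newBuilder(URI.create("http://localhost:8081/api/v1/model/inspect")).
        header("Content-Type", "application/json").
        POST(HttpRequest.BodyPublishers.ofString(body)).
        build()

    val resp = HttpClient.newHttpClient().send(req, HttpResponse.BodyHandlers.ofString())

    // Expected shape: {"status": "API_OK", "result": {"SUGGEST_SYNONYMS": {...}, ...}}
    println(resp.body())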