This is an automated email from the ASF dual-hosted git repository.
aradzinski pushed a commit to branch NLPCRAFT-41
in repository https://gitbox.apache.org/repos/asf/incubator-nlpcraft.git
The following commit(s) were added to refs/heads/NLPCRAFT-41 by this push:
new fae2ab4 WIP.
fae2ab4 is described below
commit fae2ab43c01561e301bc4453f9167a14a6a811ce
Author: Aaron Radzinski <[email protected]>
AuthorDate: Sat Sep 12 02:36:36 2020 -0700
WIP.
---
nlpcraft/src/main/resources/nlpcraft.conf | 57 ++++++----------------
.../org/apache/nlpcraft/probe/NCProbeBoot.scala | 33 ++++---------
.../probe/mgrs/deploy/NCDeployManager.scala | 15 +++---
3 files changed, 31 insertions(+), 74 deletions(-)
diff --git a/nlpcraft/src/main/resources/nlpcraft.conf b/nlpcraft/src/main/resources/nlpcraft.conf
index e57127c..a30a2c6 100644
--- a/nlpcraft/src/main/resources/nlpcraft.conf
+++ b/nlpcraft/src/main/resources/nlpcraft.conf
@@ -230,47 +230,20 @@ nlpcraft {
# Safely ignored if 'null' - but then 'models' should have at least one element.
jarsFolder = null
- # Specifies a *single* fully qualified model class name for the probe to start with.
+ # Specifies fully qualified model class names for the probe to start with.
#
- # NOTE:
- # -----
- # There are separate configurations for single ('model') or multiple models ('models')
- # for the probe to start with. It is done so for easy HOCON overriding via environment variables
- # due to HOCON limitations in dealing with array value overriding:
- #
- # - If this value is specified the 'models' value is ignored.
- # - If this value is 'null' then the 'models' configuration will be taken in.
- # - Class name must be on the active class path for the probe.
- # - Both 'model' and 'models' can be empty but then 'jarsFolder' must be provided.
- #
- model = null
+ # Note that following models require 'google' on the server side.
+ # See https://nlpcraft.apache.org/integrations.html#nlp for more details
+ # on how to configure 3rd party token providers:
+ # - "org.apache.nlpcraft.examples.phone.PhoneModel"
+ models =
+ """org.apache.nlpcraft.examples.alarm.AlarmModel,
+ org.apache.nlpcraft.examples.echo.EchoModel,
+ org.apache.nlpcraft.examples.helloworld.HelloWorldModel,
+ org.apache.nlpcraft.examples.time.TimeModel,
+ org.apache.nlpcraft.examples.weather.WeatherModel,
+ org.apache.nlpcraft.examples.lightswitch.LightSwitchModel"""
- # Specifies *multiple* fully qualifies model class names for the probe to start with.
- #
- # NOTE:
- # -----
- # There are separate configurations for single ('model') or multiple models ('models')
- # for the probe to start with. It is done so for easy HOCON overriding via environment variables
- # due to HOCON limitations in dealing with array value overriding:
- #
- # - This configuration is ignored if 'model' property is set (not 'null').
- # - Class names must be on the active class path for the probe.
- # - Both 'model' and 'models' can be empty but then 'jarsFolder' must be provided.
- #
- models = [
- # Example of listing models for probe to start with.
- "org.apache.nlpcraft.examples.alarm.AlarmModel",
- "org.apache.nlpcraft.examples.echo.EchoModel",
- "org.apache.nlpcraft.examples.helloworld.HelloWorldModel",
- "org.apache.nlpcraft.examples.time.TimeModel",
- "org.apache.nlpcraft.examples.weather.WeatherModel",
- "org.apache.nlpcraft.examples.lightswitch.LightSwitchModel"
-
- # Note that following models require 'google' on the server side.
- # See https://nlpcraft.apache.org/integrations.html#nlp for more details
- # on how to configure 3rd party token providers:
- # - "org.apache.nlpcraft.examples.phone.PhoneModel"
- ]
# Specify class names for probe life cycle components.
# Each class should extend 'NCProbeLifecycle' interface and provide a no-arg constructor.
@@ -311,12 +284,14 @@ nlpcraft {
resultMaxSizeBytes = 1048576
#
- # TODO
+ # Timeout in ms for conversation manager garbage collector.
+ # Reduce if you are experiencing large memory utilization under load with many concurrent users.
#
convGcTimeoutMs = 60000
#
- # TODO
+ # Timeout in ms for dialog flow manager garbage collector.
+ # Reduce if you are experiencing large memory utilization under load with many concurrent users.
#
dialogGcTimeoutMs = 60000
}
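The switch from a 'models' array to a single comma-separated string lets the value be overridden from outside the file, which HOCON array values do not support cleanly (the point made by the removed NOTE blocks above). A minimal sketch of such an override, assuming the probe loads its configuration through the standard Typesafe Config mechanism and assuming the full property path is 'nlpcraft.probe.models' (neither is shown in this hunk):

    # Hypothetical start-up override via a JVM system property; with Typesafe Config,
    # system properties take precedence over values read from nlpcraft.conf.
    -Dnlpcraft.probe.models="org.apache.nlpcraft.examples.time.TimeModel,org.apache.nlpcraft.examples.weather.WeatherModel"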
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/NCProbeBoot.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/NCProbeBoot.scala
index ed5c0f2..000caed 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/NCProbeBoot.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/NCProbeBoot.scala
@@ -69,12 +69,12 @@ private [probe] object NCProbeBoot extends LazyLogging with NCOpenCensusTrace {
var upLink: (String, Integer),
var downLink: (String, Integer),
var jarsFolder: Option[String],
- var model: Option[String],
- var models: Seq[String],
+ var models: String,
var lifecycle: Seq[String]
) {
- def upLinkString = s"${upLink._1}:${upLink._2}"
- def downLinkString = s"${downLink._1}:${downLink._2}"
+ lazy val upLinkString = s"${upLink._1}:${upLink._2}"
+ lazy val downLinkString = s"${downLink._1}:${downLink._2}"
+ lazy val modelsSeq: Seq[String] = models.split(",").map(_.trim)
}
private def mkDefault(): Config = {
@@ -143,8 +143,7 @@ private [probe] object NCProbeBoot extends LazyLogging with NCOpenCensusTrace {
val upLink: (String, Integer) = getHostPort(s"$prefix.upLink")
val downLink: (String, Integer) = getHostPort(s"$prefix.downLink")
val jarsFolder: Option[String] = getStringOpt(s"$prefix.jarsFolder")
- val model: Option[String] = getStringOpt(s"$prefix.model")
- val models: Seq[String] = getStringList(s"$prefix.models")
+ val models: String = getString(s"$prefix.models")
val lifecycle: Seq[String] = getStringList(s"$prefix.lifecycle")
}
@@ -154,7 +153,6 @@ private [probe] object NCProbeBoot extends LazyLogging with NCOpenCensusTrace {
Cfg.upLink,
Cfg.downLink,
Cfg.jarsFolder,
- Cfg.model,
Cfg.models,
Cfg.lifecycle
)
@@ -305,8 +303,7 @@ private [probe] object NCProbeBoot extends LazyLogging with NCOpenCensusTrace {
val upLink: (String, Integer) = getHostPort(upLinkStr)
val dnLink: (String, Integer) = getHostPort(dnLinkStr)
val jarsFolder: Option[String] = getStringOpt(s"$prefix.jarsFolder")
- val model: Option[String] = getStringOpt(s"$prefix.model")
- val models: Seq[String] = mdlClasses.map(_.getName).toSeq
+ val models: String = mdlClasses.map(_.getName).mkString(",")
val lifecycle: Seq[String] = getStringList(s"$prefix.lifecycle")
}
@@ -318,7 +315,6 @@ private [probe] object NCProbeBoot extends LazyLogging with NCOpenCensusTrace {
Cfg.upLink,
Cfg.dnLink,
Cfg.jarsFolder,
- Cfg.model,
Cfg.models,
Cfg.lifecycle),
fut
@@ -378,19 +374,9 @@ private [probe] object NCProbeBoot extends LazyLogging with NCOpenCensusTrace {
tbl += ("Down-Link", cfg.downLinkString)
tbl += ("Up-Link", cfg.upLinkString)
tbl += ("Lifecycle", cfg.lifecycle)
+ tbl += ("Models" , cfg.modelsSeq)
+ tbl += ("JARs Folder", cfg.jarsFolder.getOrElse(""))
- cfg.model match {
- case Some(m) ⇒
- tbl += ("Model", m)
- tbl += ("Models" , "<ignored>")
- tbl += ("JARs Folder", "<ignored>")
-
- case None ⇒
- tbl += ("Model", "<ignored>")
- tbl += ("Models" , cfg.models)
- tbl += ("JARs Folder", cfg.jarsFolder.getOrElse(""))
- }
-
tbl.info(logger, Some("Probe Configuration:"))
}
@@ -429,8 +415,7 @@ private [probe] object NCProbeBoot extends LazyLogging with NCOpenCensusTrace {
"downlink" → cfg.downLinkString,
"relVer" → ver.version,
"relDate" → ver.date.toString,
- "model" → cfg.model.getOrElse(""),
- "models" → cfg.models.mkString(","),
+ "models" → cfg.models,
"lifecycle" → cfg.lifecycle.mkString(","),
"jarFolder" → cfg.jarsFolder
)
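The comma-separated 'models' value is turned back into individual class names by the new 'modelsSeq' helper shown above. A self-contained Scala sketch of that parsing (the sample value below is illustrative, not the committed default):

    // Parse a comma-separated 'models' property into individual model class names,
    // mirroring the new lazy val in NCProbeBoot: split on commas and trim whitespace.
    val models: String =
        "org.apache.nlpcraft.examples.alarm.AlarmModel, org.apache.nlpcraft.examples.echo.EchoModel"

    val modelsSeq: Seq[String] = models.split(",").map(_.trim).toSeq

    // Result:
    //   Seq("org.apache.nlpcraft.examples.alarm.AlarmModel",
    //       "org.apache.nlpcraft.examples.echo.EchoModel")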
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/deploy/NCDeployManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/deploy/NCDeployManager.scala
index b662176..5cea563 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/deploy/NCDeployManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/deploy/NCDeployManager.scala
@@ -90,7 +90,7 @@ object NCDeployManager extends NCService with DecorateAsScala {
def modelFactoryType: Option[String] = getStringOpt(s"$pre.modelFactory.type")
def modelFactoryProps: Option[Map[String, String]] = getMapOpt(s"$pre.modelFactory.properties")
def model: Option[String] = getStringOpt(s"$pre.model")
- def models: Seq[String] = getStringList(s"$pre.models")
+ def models: String = getString(s"$pre.models")
def jarsFolder: Option[String] = getStringOpt(s"$pre.jarsFolder")
}
@@ -137,7 +137,7 @@ object NCDeployManager extends NCService with DecorateAsScala {
mdl.getMacros.asScala.keys.foreach(makro ⇒
if (!allSyns.exists(_.contains(makro)))
- logger.warn(s"Unused macro [mdlId=$mdlId, macro=$makro]")
+ logger.warn(s"Unused macro detected [mdlId=$mdlId,
macro=$makro]")
)
val parser = new NCMacroParser
@@ -471,7 +471,7 @@ object NCDeployManager extends NCService with DecorateAsScala {
)
}
else
- logger.warn(s"Model has no defined intents [mdlId=$mdlId]")
+ logger.warn(s"Model has no intents [mdlId=$mdlId]")
NCModelData(
model = mdl,
@@ -621,7 +621,7 @@ object NCDeployManager extends NCService with DecorateAsScala {
case None ⇒ // No-op.
}
- data ++= Config.models.map(makeModelWrapper)
+ data ++= Config.models.split(",").map(_.trim).map(makeModelWrapper)
Config.jarsFolder match {
case Some(jarsFolder) ⇒
@@ -1490,9 +1490,6 @@ object NCDeployManager extends NCService with DecorateAsScala {
None
}).toMap
- if (!annFound)
- logger.warn(s"No intents found [mdlId=${mdl.getId}")
-
val parser = new NCMacroParser
mdl.getMacros.asScala.foreach { case (name, str) ⇒ parser.addMacro(name, str) }
@@ -1512,9 +1509,9 @@ object NCDeployManager extends NCService with DecorateAsScala {
val seq: Seq[String] = sNorm.split(" ").map(NCNlpPorterStemmer.stem)
if (!allSyns.exists(_.intersect(seq).nonEmpty))
- logger.warn(s"Intent sample doesn't contain any direct
synonyms [" +
+ logger.warn(s"@IntentSample sample doesn't contain any direct synonyms [" +
s"mdlId=$mdlId, " +
- s"sample=$s" +
+ s"sample='$s'" +
s"]")
}