This is an automated email from the ASF dual-hosted git repository.

rabbah pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/openwhisk-runtime-python.git


The following commit(s) were added to refs/heads/master by this push:
     new 90a8e85  Updated Python runtimes to use "Action Loop" Proxy with new 
async handshake (#82)
90a8e85 is described below

commit 90a8e853dcf1db761ab51da7a6cc78ff749b66d3
Author: Michele Sciabarra <[email protected]>
AuthorDate: Wed Feb 19 15:08:27 2020 +0100

    Updated Python runtimes to use "Action Loop" Proxy with new async handshake 
(#82)
---
 .gitignore                                         |   2 +-
 README.md                                          |   1 +
 .../Dockerfile                                     |  66 +-
 core/python2ActionLoop/Makefile                    |  31 +
 core/python2ActionLoop/bin/compile                 | 111 +++
 .../build.gradle                                   |   2 +-
 .../lib/launcher.py}                               |  33 +-
 core/{pythonAction => python3Action}/CHANGELOG.md  |   0
 core/{pythonAction => python3Action}/Dockerfile    |   0
 core/{pythonAction => python3Action}/build.gradle  |   0
 .../pythonrunner.py                                |   0
 .../Dockerfile                                     |  27 +-
 core/python3ActionLoop/Makefile                    |  31 +
 core/python3ActionLoop/bin/compile                 | 113 +++
 .../build.gradle                                   |   0
 .../lib/launcher.py}                               |  13 +-
 core/python3AiActionLoop/Dockerfile                |  87 ++
 core/python3AiActionLoop/Makefile                  |  32 +
 core/python3AiActionLoop/README.md                 |  85 ++
 core/python3AiActionLoop/bin/compile               | 113 +++
 .../build.gradle                                   |   2 +-
 .../lib/launcher.py}                               |  13 +-
 core/python3AiActionLoop/requirements.txt          |  44 ++
 .../samples/smart-body-crop/.gitignore             |   5 +
 .../samples/smart-body-crop/common.py              | 332 ++++++++
 .../samples/smart-body-crop/crop.ipynb             | 872 +++++++++++++++++++++
 .../samples/smart-body-crop/fashion-men-1.jpg      | Bin 0 -> 2471074 bytes
 .../samples/smart-body-crop/inference.py           | 246 ++++++
 core/pythonActionLoop/pythonbuild.py               | 110 ---
 settings.gradle                                    |   7 +-
 .../Python2ActionLoopContainerTests.scala          |  31 +-
 .../Python3AiActionLoopContainerTests.scala        | 155 ++++
 .../PythonActionContainerTests.scala               |  47 +-
 .../PythonActionLoopContainerTests.scala           |  19 +-
 .../PythonActionLoopExtraTests.scala               |  95 +++
 35 files changed, 2493 insertions(+), 232 deletions(-)

diff --git a/.gitignore b/.gitignore
index 4f7e0aa..84a0798 100644
--- a/.gitignore
+++ b/.gitignore
@@ -18,7 +18,7 @@ results
 !/ansible/environments/mac
 
 # Eclipse
-bin/
+#bin/
 **/.project
 .settings/
 .classpath
diff --git a/README.md b/README.md
index 76c0b70..9bfcf6e 100644
--- a/README.md
+++ b/README.md
@@ -113,3 +113,4 @@ Using IntelliJ:
 -Dhttp.proxyHost=localhost
 -Dhttp.proxyPort=3128
 ```
+
diff --git a/core/pythonActionLoop/Dockerfile 
b/core/python2ActionLoop/Dockerfile
similarity index 50%
copy from core/pythonActionLoop/Dockerfile
copy to core/python2ActionLoop/Dockerfile
index c4f3959..2d3e278 100644
--- a/core/pythonActionLoop/Dockerfile
+++ b/core/python2ActionLoop/Dockerfile
@@ -17,7 +17,11 @@
 
 # build go proxy from source
 FROM golang:1.12 AS builder_source
-RUN env CGO_ENABLED=0 go get github.com/apache/openwhisk-runtime-go/main && mv 
/go/bin/main /bin/proxy
+#RUN env CGO_ENABLED=0 go get github.com/apache/openwhisk-runtime-go/main && 
mv /go/bin/main /bin/proxy
+RUN git clone --branch dev \
+   https://github.com/nimbella-corp/openwhisk-runtime-go /src ;\
+   cd /src ; env GO111MODULE=on CGO_ENABLED=0 go build main/proxy.go && \
+   mv proxy /bin/proxy
 
 # or build it from a release
 FROM golang:1.12 AS builder_release
@@ -28,32 +32,54 @@ RUN curl -sL \
   && cd openwhisk-runtime-go-*/main\
   && GO111MODULE=on go build -o /bin/proxy
 
-FROM python:3.7-stretch
-
+FROM python:2.7-alpine
 # select the builder to use
-ARG GO_PROXY_BUILD_FROM=release
+ARG GO_PROXY_BUILD_FROM=source
+
+# Upgrade and install basic Python dependencies
+RUN apk add --no-cache \
+        bash \
+        bzip2-dev \
+        gcc \
+        libc-dev \
+        libxslt-dev \
+        libxml2-dev \
+        libffi-dev \
+        linux-headers \
+        openssl-dev \
+        python-dev
 
 # Install common modules for python
-RUN pip install \
-    beautifulsoup4==4.6.3 \
-    httplib2==0.11.3 \
-    kafka_python==1.4.3 \
-    lxml==4.2.5 \
-    python-dateutil==2.7.3 \
-    requests==2.19.1 \
-    scrapy==1.5.1 \
-    simplejson==3.16.0 \
-    virtualenv==16.0.0 \
-    twisted==18.7.0
+RUN pip install --no-cache-dir --upgrade pip setuptools six \
+ && pip install --no-cache-dir \
+        gevent==1.3.6 \
+        flask==1.0.2 \
+        beautifulsoup4==4.6.3 \
+        httplib2==0.11.3 \
+        kafka_python==1.4.3 \
+        lxml==4.2.5 \
+        python-dateutil==2.7.3 \
+        requests==2.19.1 \
+        scrapy==1.5.1 \
+        simplejson==3.16.0 \
+        virtualenv==16.0.0 \
+        twisted==18.7.0 \
+        signalfx_lambda==0.2.1
 
 RUN mkdir -p /action
 WORKDIR /
 COPY --from=builder_source /bin/proxy /bin/proxy_source
 COPY --from=builder_release /bin/proxy /bin/proxy_release
 RUN mv /bin/proxy_${GO_PROXY_BUILD_FROM} /bin/proxy
-ADD pythonbuild.py /bin/compile
-ADD pythonbuild.py.launcher.py /bin/compile.launcher.py
-ENV OW_COMPILER=/bin/compile
-ENTRYPOINT []
-CMD ["/bin/proxy"]
 
+ADD bin/compile /bin/compile
+ADD lib/launcher.py /lib/launcher.py
+# the compiler script
+ENV OW_COMPILER=/bin/compile
+# log initialization errors
+ENV OW_LOG_INIT_ERROR=1
+# the launcher must wait for an ack
+ENV OW_WAIT_FOR_ACK=1
+# using the runtime name to identify the execution environment
+ENV OW_EXECUTION_ENV=openwhisk/action-python-v2.7
+ENTRYPOINT ["/bin/proxy"]
diff --git a/core/python2ActionLoop/Makefile b/core/python2ActionLoop/Makefile
new file mode 100644
index 0000000..4bae7ae
--- /dev/null
+++ b/core/python2ActionLoop/Makefile
@@ -0,0 +1,31 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+IMG=whisk/actionloop-python-v2.7:latest
+
+build:
+       docker build -t $(IMG) -f Dockerfile .
+
+clean:
+       docker rmi -f $(IMG)
+
+debug:
+       docker run -p 8080:8080 \
+       -ti --entrypoint=/bin/bash -v $(PWD):/mnt \
+       -e OW_COMPILER=/mnt/bin/compile \
+       $(IMG)
+
+.PHONY: build clean  debug
diff --git a/core/python2ActionLoop/bin/compile 
b/core/python2ActionLoop/bin/compile
new file mode 100755
index 0000000..62b55e7
--- /dev/null
+++ b/core/python2ActionLoop/bin/compile
@@ -0,0 +1,111 @@
+#!/usr/bin/env python
+"""Python Action Builder
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""
+
+from __future__ import print_function
+import os, os.path, sys, imp, ast, shutil, subprocess, traceback
+from os.path import abspath, exists, dirname
+
+# write a file creating intermediate directories
+def write_file(file, body, executable=False):
+    try: os.makedirs(dirname(file), mode=0o755)
+    except: pass
+    with open(file, mode="wb") as f:
+        f.write(body)
+    if executable:
+        os.chmod(file, 0o755)
+
+# copy a file, optionally replacing a substring
+def copy_replace(src, dst, match=None, replacement=""):
+    with open(src, 'rb') as s:
+        body = s.read()
+        if match:
+            body = body.replace(match, replacement)
+        write_file(dst, body)
+
+# assemble sources
+def sources(launcher, main, src_dir):
+    # move exec in the right place if exists
+    src_file = "%s/exec" % src_dir
+    if exists(src_file):
+        os.rename(src_file, "%s/__main__.py" % src_dir)
+    if exists("%s/__main__.py" % src_dir):
+        os.rename("%s/__main__.py" % src_dir, "%s/main__.py" % src_dir)
+
+    # write the boilerplate in a temp dir
+    copy_replace(launcher, "%s/exec__.py" % src_dir,
+          "from main__ import main as main",
+          "from main__ import %s as main" % main )
+
+# compile sources
+def build(src_dir, tgt_dir):
+    # in general, compile your program into an executable format
+    # for scripting languages, move sources and create a launcher
+    # move away the action dir and replace with the new
+    shutil.rmtree(tgt_dir)
+    shutil.move(src_dir, tgt_dir)
+    tgt_file = "%s/exec" % tgt_dir
+    write_file(tgt_file, """#!/bin/bash
+if [ "$(cat $0.env)" = "$__OW_EXECUTION_ENV" ]
+then cd "$(dirname $0)"
+     exec /usr/local/bin/python exec__.py "$@"
+else echo "Execution Environment Mismatch"
+     echo "Expected: $(cat $0.env)"
+     echo "Actual: $__OW_EXECUTION_ENV"
+     exit 1
+fi
+""", True)
+    write_file("%s.env"%tgt_file, os.environ['__OW_EXECUTION_ENV'])
+    return tgt_file
+
+#check if a module exists
+def check(tgt_dir, module_name):
+    try:
+
+        # find module
+        mod = imp.find_module(module_name, [tgt_dir])
+        # parse module
+        ast.parse(mod[0].read())
+        # check virtualenv
+        path_to_virtualenv = abspath('%s/virtualenv' % tgt_dir)
+        if os.path.isdir(path_to_virtualenv):
+            activate_this_file = path_to_virtualenv + '/bin/activate_this.py'
+            if not os.path.exists(activate_this_file):
+               sys.stderr.write('Invalid virtualenv. Zip file does not include 
activate_this.py')
+    except ImportError:
+        sys.stderr.write("Zip file does not include %s" % module_name)
+    except SyntaxError as er:
+        sys.stderr.write(er.msg)
+    except Exception as ex:
+        sys.stderr.write(ex.message)
+    sys.stderr.flush()
+
+if __name__ == '__main__':
+    if len(sys.argv) < 4:
+        sys.stdout.write("usage: <main-function> <source-dir> <target-dir>\n")
+        sys.stdout.flush()
+        sys.exit(1)
+    launcher = "%s/lib/launcher.py" % dirname(dirname(sys.argv[0]))
+    src_dir = abspath(sys.argv[2])
+    tgt_dir = abspath(sys.argv[3])
+    sources(launcher, sys.argv[1], src_dir)
+    tgt = build(abspath(sys.argv[2]), tgt_dir)
+    check(tgt_dir, "main__")
+    sys.stdout.flush()
+    sys.stderr.flush()
diff --git a/core/pythonAction/build.gradle 
b/core/python2ActionLoop/build.gradle
similarity index 94%
copy from core/pythonAction/build.gradle
copy to core/python2ActionLoop/build.gradle
index f120d86..66ecc3a 100644
--- a/core/pythonAction/build.gradle
+++ b/core/python2ActionLoop/build.gradle
@@ -15,5 +15,5 @@
  * limitations under the License.
  */
 
-ext.dockerImageName = 'python3action'
+ext.dockerImageName = 'actionloop-python-v2.7'
 apply from: '../../gradle/docker.gradle'
diff --git a/core/pythonActionLoop/pythonbuild.py.launcher.py 
b/core/python2ActionLoop/lib/launcher.py
similarity index 79%
copy from core/pythonActionLoop/pythonbuild.py.launcher.py
copy to core/python2ActionLoop/lib/launcher.py
index 360cb55..3204c9c 100755
--- a/core/pythonActionLoop/pythonbuild.py.launcher.py
+++ b/core/python2ActionLoop/lib/launcher.py
@@ -15,11 +15,11 @@
 # limitations under the License.
 #
 from __future__ import print_function
-from sys import stdin
-from sys import stdout
-from sys import stderr
 from os import fdopen
-import sys, os, json, traceback, warnings
+import sys, os, codecs, json, traceback, warnings
+
+sys.stdout = codecs.getwriter('utf8')(sys.stdout)
+sys.stderr = codecs.getwriter('utf8')(sys.stderr)
 
 try:
   # if the directory 'virtualenv' is extracted out of a zip file
@@ -39,36 +39,33 @@ except Exception:
   sys.exit(1)
 
 # now import the action as process input/output
-warnings.filterwarnings("ignore")
 from main__ import main as main
-warnings.resetwarnings()
-
-# if there are some arguments exit immediately
-if len(sys.argv) >1:
-  sys.stderr.flush()
-  sys.stdout.flush()
-  sys.exit(0)
 
 env = os.environ
 out = fdopen(3, "wb")
+if os.getenv("__OW_WAIT_FOR_ACK", "") != "":
+  out.write(json.dumps({"ok": True}, ensure_ascii=False).encode('utf-8'))
+  out.write(b'\n')
+  out.flush()
 while True:
-  line = stdin.readline()
+  line = sys.stdin.readline().decode('utf-8')
   if not line: break
   args = json.loads(line)
   payload = {}
   for key in args:
-    if key == "value":
+    akey = key.encode("ascii", "ignore")
+    if akey == "value":
       payload = args["value"]
     else:
-      env["__OW_%s" % key.upper()]= args[key]
+      env["__OW_%s" % akey.upper()] = args[key].encode("ascii", "ignore")
   res = {}
   try:
     res = main(payload)
   except Exception as ex:
-    print(traceback.format_exc(), file=stderr)
+    print(traceback.format_exc(), file=sys.stderr)
     res = {"error": str(ex)}
   out.write(json.dumps(res, ensure_ascii=False).encode('utf-8'))
   out.write(b'\n')
-  stdout.flush()
-  stderr.flush()
+  sys.stdout.flush()
+  sys.stderr.flush()
   out.flush()
diff --git a/core/pythonAction/CHANGELOG.md b/core/python3Action/CHANGELOG.md
similarity index 100%
rename from core/pythonAction/CHANGELOG.md
rename to core/python3Action/CHANGELOG.md
diff --git a/core/pythonAction/Dockerfile b/core/python3Action/Dockerfile
similarity index 100%
rename from core/pythonAction/Dockerfile
rename to core/python3Action/Dockerfile
diff --git a/core/pythonAction/build.gradle b/core/python3Action/build.gradle
similarity index 100%
copy from core/pythonAction/build.gradle
copy to core/python3Action/build.gradle
diff --git a/core/pythonAction/pythonrunner.py 
b/core/python3Action/pythonrunner.py
similarity index 100%
rename from core/pythonAction/pythonrunner.py
rename to core/python3Action/pythonrunner.py
diff --git a/core/pythonActionLoop/Dockerfile 
b/core/python3ActionLoop/Dockerfile
similarity index 72%
rename from core/pythonActionLoop/Dockerfile
rename to core/python3ActionLoop/Dockerfile
index c4f3959..dee8c5f 100644
--- a/core/pythonActionLoop/Dockerfile
+++ b/core/python3ActionLoop/Dockerfile
@@ -14,10 +14,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
 # build go proxy from source
 FROM golang:1.12 AS builder_source
-RUN env CGO_ENABLED=0 go get github.com/apache/openwhisk-runtime-go/main && mv 
/go/bin/main /bin/proxy
+#RUN env CGO_ENABLED=0 go get github.com/apache/openwhisk-runtime-go/main && 
mv /go/bin/main /bin/proxy
+RUN git clone --branch dev \
+   https://github.com/nimbella-corp/openwhisk-runtime-go /src ;\
+   cd /src ; env GO111MODULE=on CGO_ENABLED=0 go build main/proxy.go && \
+   mv proxy /bin/proxy
 
 # or build it from a release
 FROM golang:1.12 AS builder_release
@@ -29,9 +32,7 @@ RUN curl -sL \
   && GO111MODULE=on go build -o /bin/proxy
 
 FROM python:3.7-stretch
-
-# select the builder to use
-ARG GO_PROXY_BUILD_FROM=release
+ARG GO_PROXY_BUILD_FROM=source
 
 # Install common modules for python
 RUN pip install \
@@ -51,9 +52,17 @@ WORKDIR /
 COPY --from=builder_source /bin/proxy /bin/proxy_source
 COPY --from=builder_release /bin/proxy /bin/proxy_release
 RUN mv /bin/proxy_${GO_PROXY_BUILD_FROM} /bin/proxy
-ADD pythonbuild.py /bin/compile
-ADD pythonbuild.py.launcher.py /bin/compile.launcher.py
+ADD bin/compile /bin/compile
+ADD lib/launcher.py /lib/launcher.py
+
+# log initialization errors
+ENV OW_LOG_INIT_ERROR=1
+# the launcher must wait for an ack
+ENV OW_WAIT_FOR_ACK=1
+# using the runtime name to identify the execution environment
+ENV OW_EXECUTION_ENV=openwhisk/action-python-v3.7
+# compiler script
 ENV OW_COMPILER=/bin/compile
-ENTRYPOINT []
-CMD ["/bin/proxy"]
+
+ENTRYPOINT ["/bin/proxy"]
 
diff --git a/core/python3ActionLoop/Makefile b/core/python3ActionLoop/Makefile
new file mode 100644
index 0000000..7430512
--- /dev/null
+++ b/core/python3ActionLoop/Makefile
@@ -0,0 +1,31 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+IMG=whisk/actionloop-python-v3.7:latest
+
+build:
+       docker build -t $(IMG) .
+
+clean:
+       docker rmi -f $(IMG)
+
+debug: build
+       docker run -p 8080:8080 \
+       -ti --entrypoint=/bin/bash -v $(PWD):/mnt \
+       -e OW_COMPILER=/mnt/bin/compile \
+       $(IMG)
+
+.PHONY: build clean  debug
diff --git a/core/python3ActionLoop/bin/compile 
b/core/python3ActionLoop/bin/compile
new file mode 100755
index 0000000..d8eba62
--- /dev/null
+++ b/core/python3ActionLoop/bin/compile
@@ -0,0 +1,113 @@
+#!/usr/bin/env python3
+"""Python Action Builder
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""
+
+from __future__ import print_function
+import os, os.path, sys, ast, shutil, subprocess, traceback
+import importlib
+from os.path import abspath, exists, dirname
+
+# write a file creating intermediate directories
+def write_file(file, body, executable=False):
+    try: os.makedirs(dirname(file), mode=0o755)
+    except: pass
+    with open(file, mode="wb") as f:
+        f.write(body.encode("utf-8"))
+    if executable:
+        os.chmod(file, 0o755)
+
+# copy a file, optionally replacing a substring
+def copy_replace(src, dst, match=None, replacement=""):
+    with open(src, 'rb') as s:
+        body = s.read()
+        if match:
+            body = body.decode("utf-8").replace(match, replacement)
+        write_file(dst, body)
+
+# assemble sources
+def sources(launcher, main, src_dir):
+    # move exec in the right place if exists
+    src_file = "%s/exec" % src_dir
+    if exists(src_file):
+        os.rename(src_file, "%s/__main__.py" % src_dir)
+    if exists("%s/__main__.py" % src_dir):
+        os.rename("%s/__main__.py" % src_dir, "%s/main__.py" % src_dir)
+
+    # write the boilerplate in a temp dir
+    copy_replace(launcher, "%s/exec__.py" % src_dir,
+          "from main__ import main as main",
+          "from main__ import %s as main" % main )
+
+# compile sources
+def build(src_dir, tgt_dir):
+    # in general, compile your program into an executable format
+    # for scripting languages, move sources and create a launcher
+    # move away the action dir and replace with the new
+    shutil.rmtree(tgt_dir)
+    shutil.move(src_dir, tgt_dir)
+    tgt_file = "%s/exec" % tgt_dir
+    write_file(tgt_file, """#!/bin/bash
+export PYTHONIOENCODING=UTF-8
+if [ "$(cat $0.env)" = "$__OW_EXECUTION_ENV" ]
+then cd "$(dirname $0)"
+     exec /usr/local/bin/python exec__.py "$@"
+else echo "Execution Environment Mismatch"
+     echo "Expected: $(cat $0.env)"
+     echo "Actual: $__OW_EXECUTION_ENV"
+     exit 1
+fi
+""", True)
+    write_file("%s.env"%tgt_file, os.environ['__OW_EXECUTION_ENV'])
+    return tgt_file
+
+#check if a module exists
+def check(tgt_dir, module_name):
+    try:
+        sys.path.append(tgt_dir)
+        mod = importlib.util.find_spec(module_name)
+        if mod:
+            with open(mod.origin, "rb") as f:
+                ast.parse(f.read().decode("utf-8"))
+            # check virtualenv
+            path_to_virtualenv = abspath('%s/virtualenv' % tgt_dir)
+            if os.path.isdir(path_to_virtualenv):
+                activate_this_file = path_to_virtualenv + 
'/bin/activate_this.py'
+                if not os.path.exists(activate_this_file):
+                   sys.stderr.write('Invalid virtualenv. Zip file does not 
include activate_this.py')
+        else:
+            sys.stderr.write("Zip file does not include %s" % module_name)
+    except SyntaxError as er:
+        sys.stderr.write(er.msg)
+    except Exception as ex:
+        sys.stderr.write(ex)
+    sys.stderr.flush()
+
+if __name__ == '__main__':
+    if len(sys.argv) < 4:
+        sys.stdout.write("usage: <main-function> <source-dir> <target-dir>\n")
+        sys.stdout.flush()
+        sys.exit(1)
+    launcher = "%s/lib/launcher.py" % dirname(dirname(sys.argv[0]))
+    src_dir = abspath(sys.argv[2])
+    tgt_dir = abspath(sys.argv[3])
+    sources(launcher, sys.argv[1], src_dir)
+    tgt = build(abspath(sys.argv[2]), tgt_dir)
+    check(tgt_dir, "main__")
+    sys.stdout.flush()
+    sys.stderr.flush()
diff --git a/core/pythonActionLoop/build.gradle 
b/core/python3ActionLoop/build.gradle
similarity index 100%
rename from core/pythonActionLoop/build.gradle
rename to core/python3ActionLoop/build.gradle
diff --git a/core/pythonActionLoop/pythonbuild.py.launcher.py 
b/core/python3ActionLoop/lib/launcher.py
similarity index 92%
copy from core/pythonActionLoop/pythonbuild.py.launcher.py
copy to core/python3ActionLoop/lib/launcher.py
index 360cb55..9773570 100755
--- a/core/pythonActionLoop/pythonbuild.py.launcher.py
+++ b/core/python3ActionLoop/lib/launcher.py
@@ -39,18 +39,15 @@ except Exception:
   sys.exit(1)
 
 # now import the action as process input/output
-warnings.filterwarnings("ignore")
 from main__ import main as main
-warnings.resetwarnings()
 
-# if there are some arguments exit immediately
-if len(sys.argv) >1:
-  sys.stderr.flush()
-  sys.stdout.flush()
-  sys.exit(0)
+out = fdopen(3, "wb")
+if os.getenv("__OW_WAIT_FOR_ACK", "") != "":
+    out.write(json.dumps({"ok": True}, ensure_ascii=False).encode('utf-8'))
+    out.write(b'\n')
+    out.flush()
 
 env = os.environ
-out = fdopen(3, "wb")
 while True:
   line = stdin.readline()
   if not line: break
diff --git a/core/python3AiActionLoop/Dockerfile 
b/core/python3AiActionLoop/Dockerfile
new file mode 100644
index 0000000..3b422e6
--- /dev/null
+++ b/core/python3AiActionLoop/Dockerfile
@@ -0,0 +1,87 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# build go proxy from source
+FROM golang:1.12 AS builder_source
+#RUN env CGO_ENABLED=0 go get github.com/apache/openwhisk-runtime-go/main && 
mv /go/bin/main /bin/proxy
+RUN git clone --branch dev \
+   https://github.com/nimbella-corp/openwhisk-runtime-go /src ;\
+   cd /src ; env GO111MODULE=on CGO_ENABLED=0 go build main/proxy.go && \
+   mv proxy /bin/proxy
+
+# or build it from a release
+FROM golang:1.12 AS builder_release
+ARG [email protected]
+RUN curl -sL \
+  
https://github.com/apache/openwhisk-runtime-go/archive/{$GO_PROXY_RELEASE_VERSION}.tar.gz\
+  | tar xzf -\
+  && cd openwhisk-runtime-go-*/main\
+  && GO111MODULE=on go build -o /bin/proxy
+
+# Dockerfile for python AI actions, overrides and extends ActionRunner from 
actionProxy
+FROM tensorflow/tensorflow:1.15.2-py3-jupyter
+
+# select the builder to use
+ARG GO_PROXY_BUILD_FROM=source
+
+RUN apt-get update && apt-get upgrade -y && apt-get install -y \
+            curl \
+            gcc \
+            libc-dev \
+            libxslt-dev \
+            libxml2-dev \
+            libffi-dev \
+            libssl-dev \
+            zip \
+            unzip \
+            vim \
+            && rm -rf /var/lib/apt/lists/*
+
+# PyTorch
+RUN pip3 install torch torchvision
+
+# rclone
+RUN curl -L https://downloads.rclone.org/rclone-current-linux-amd64.deb -o 
rclone.deb \
+    && dpkg -i rclone.deb \
+    && rm rclone.deb
+
+COPY requirements.txt requirements.txt
+RUN pip3 install --upgrade pip six &&\
+    pip3 install --no-cache-dir -r requirements.txt &&\
+    ln -sf /usr/bin/python3 /usr/local/bin/python
+
+RUN mkdir -p /action
+WORKDIR /
+
+COPY --from=builder_source /bin/proxy /bin/proxy_source
+COPY --from=builder_release /bin/proxy /bin/proxy_release
+RUN mv /bin/proxy_${GO_PROXY_BUILD_FROM} /bin/proxy
+
+ADD bin/compile /bin/compile
+ADD lib/launcher.py /lib/launcher.py
+
+# log initialization errors
+ENV OW_LOG_INIT_ERROR=1
+# the launcher must wait for an ack
+ENV OW_WAIT_FOR_ACK=1
+# using the runtime name to identify the execution environment
+ENV OW_EXECUTION_ENV=openwhisk/action-python-v3.6-ai
+# compiler script
+ENV OW_COMPILER=/bin/compile
+# use utf-8
+ENV PYTHONIOENCODING=UTF-8
+ENTRYPOINT ["/bin/proxy"]
diff --git a/core/python3AiActionLoop/Makefile 
b/core/python3AiActionLoop/Makefile
new file mode 100644
index 0000000..61560ee
--- /dev/null
+++ b/core/python3AiActionLoop/Makefile
@@ -0,0 +1,32 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+IMG=whisk/actionloop-python-v3.6-ai:latest
+
+build:
+       docker build -t $(IMG) -f Dockerfile .
+
+clean:
+       docker rmi -f $(IMG)
+
+debug:
+       docker run -p 8080:8080 \
+       -ti --entrypoint=/bin/bash -v $(PWD):/mnt \
+       -e OW_COMPILER=/mnt/bin/compile \
+       $(IMG)
+
+.PHONY: build clean  debug
+
diff --git a/core/python3AiActionLoop/README.md 
b/core/python3AiActionLoop/README.md
new file mode 100644
index 0000000..c27663d
--- /dev/null
+++ b/core/python3AiActionLoop/README.md
@@ -0,0 +1,85 @@
+<!--
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+-->
+
+# AI Action
+
+This image contains libraries and frameworks useful for running AI Services.
+
+Below are the versions for the included libraries:
+
+| Image Version | Package | Notes |
+| ------------- | ------- | ----- |
+| 1.1.0      | Tensorflow 1.11.0, PyTorch 0.4.1 | Based on Ubuntu 16.04.5, 
Python 3.5.2.
+
+### Opening Notebooks
+
+This image has Jupyter Notebook installed. You may find it useful to run quick 
Notebooks directly on the image, which may run the actual code. To start Jupyter 
Notebook, execute:
+
+```bash
+$ docker run -it -p 8888:8888 --rm --entrypoint jupyter-notebook 
openwhisk/actionloop-python-v3.7ai  --notebook-dir=/notebooks --ip 0.0.0.0 
--no-browser --allow-root
+```
+
+#### AI Action Sample
+
+To view an example with this AI Action check the [samples/smart-body-crop 
notebook](./samples/smart-body-crop/crop.ipynb) and follow the instructions.
+
+### 1.1.0 Details
+#### Available python packages
+
+| Package               | Version               |
+| --------------------- | --------------------- |
+| tensorboard           | 1.11.0                |
+| tensorflow            | 1.11.0                |
+| torch                 | 0.4.1                 |
+| torchvision           | 0.2.1                 |
+| scikit-learn          | 0.19.2                |
+| scipy                 | 1.1.0                 |
+| sklearn               | 0.0                   |
+| numpy                 | 1.15.2                |
+| pandas                | 0.23.4                |
+| Pillow                | 5.2.0                 |
+| Cython                | 0.28.5                |
+| ipykernel             | 4.9.0                 |
+| ipython               | 6.5.0                 |
+| ipywidgets            | 7.4.2                 |
+| jupyter               | 1.0.0                 |
+| jupyter-client        | 5.2.3                 |
+| jupyter-console       | 5.2.0                 |
+| jupyter-core          | 4.4.0                 |
+| Keras                 | 2.2.2                 |
+| Keras-Applications    | 1.0.4                 |
+| Keras-Preprocessing   | 1.0.2                 |
+| matplotlib            | 3.0.0                 |
+| notebook              | 5.7.0                 |
+| opencv-contrib-python | 3.4.2.17              |
+| protobuf              | 3.6.1                 |
+
+For a complete list execute:
+
+```bash
+$ docker run --rm --entrypoint pip openwhisk/python3aiaction list
+```
+
+#### Available Ubuntu packages
+
+For a complete list execute:
+
+```bash
+$ docker run --rm --entrypoint apt openwhisk/python3aiaction list --installed
+```
diff --git a/core/python3AiActionLoop/bin/compile 
b/core/python3AiActionLoop/bin/compile
new file mode 100755
index 0000000..d8eba62
--- /dev/null
+++ b/core/python3AiActionLoop/bin/compile
@@ -0,0 +1,113 @@
+#!/usr/bin/env python3
+"""Python Action Builder
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""
+
+from __future__ import print_function
+import os, os.path, sys, ast, shutil, subprocess, traceback
+import importlib
+from os.path import abspath, exists, dirname
+
+# write a file creating intermediate directories
+def write_file(file, body, executable=False):
+    try: os.makedirs(dirname(file), mode=0o755)
+    except: pass
+    with open(file, mode="wb") as f:
+        f.write(body.encode("utf-8"))
+    if executable:
+        os.chmod(file, 0o755)
+
+# copy a file, optionally replacing a substring
+def copy_replace(src, dst, match=None, replacement=""):
+    with open(src, 'rb') as s:
+        body = s.read()
+        if match:
+            body = body.decode("utf-8").replace(match, replacement)
+        write_file(dst, body)
+
+# assemble sources
+def sources(launcher, main, src_dir):
+    # move exec in the right place if exists
+    src_file = "%s/exec" % src_dir
+    if exists(src_file):
+        os.rename(src_file, "%s/__main__.py" % src_dir)
+    if exists("%s/__main__.py" % src_dir):
+        os.rename("%s/__main__.py" % src_dir, "%s/main__.py" % src_dir)
+
+    # write the boilerplate in a temp dir
+    copy_replace(launcher, "%s/exec__.py" % src_dir,
+          "from main__ import main as main",
+          "from main__ import %s as main" % main )
+
+# compile sources
+def build(src_dir, tgt_dir):
+    # in general, compile your program into an executable format
+    # for scripting languages, move sources and create a launcher
+    # move away the action dir and replace with the new
+    shutil.rmtree(tgt_dir)
+    shutil.move(src_dir, tgt_dir)
+    tgt_file = "%s/exec" % tgt_dir
+    write_file(tgt_file, """#!/bin/bash
+export PYTHONIOENCODING=UTF-8
+if [ "$(cat $0.env)" = "$__OW_EXECUTION_ENV" ]
+then cd "$(dirname $0)"
+     exec /usr/local/bin/python exec__.py "$@"
+else echo "Execution Environment Mismatch"
+     echo "Expected: $(cat $0.env)"
+     echo "Actual: $__OW_EXECUTION_ENV"
+     exit 1
+fi
+""", True)
+    write_file("%s.env"%tgt_file, os.environ['__OW_EXECUTION_ENV'])
+    return tgt_file
+
+# check that a module exists and is syntactically valid
+def check(tgt_dir, module_name):
+    try:
+        sys.path.append(tgt_dir)
+        mod = importlib.util.find_spec(module_name)
+        if mod:
+            with open(mod.origin, "rb") as f:
+                ast.parse(f.read().decode("utf-8"))
+            # check virtualenv
+            path_to_virtualenv = abspath('%s/virtualenv' % tgt_dir)
+            if os.path.isdir(path_to_virtualenv):
+                activate_this_file = path_to_virtualenv + 
'/bin/activate_this.py'
+                if not os.path.exists(activate_this_file):
+                   sys.stderr.write('Invalid virtualenv. Zip file does not 
include activate_this.py')
+        else:
+            sys.stderr.write("Zip file does not include %s" % module_name)
+    except SyntaxError as er:
+        sys.stderr.write(er.msg)
+    except Exception as ex:
+        sys.stderr.write(ex)
+    sys.stderr.flush()
+
+if __name__ == '__main__':
+    if len(sys.argv) < 4:
+        sys.stdout.write("usage: <main-function> <source-dir> <target-dir>\n")
+        sys.stdout.flush()
+        sys.exit(1)
+    launcher = "%s/lib/launcher.py" % dirname(dirname(sys.argv[0]))
+    src_dir = abspath(sys.argv[2])
+    tgt_dir = abspath(sys.argv[3])
+    sources(launcher, sys.argv[1], src_dir)
+    tgt = build(abspath(sys.argv[2]), tgt_dir)
+    check(tgt_dir, "main__")
+    sys.stdout.flush()
+    sys.stderr.flush()
diff --git a/core/pythonAction/build.gradle 
b/core/python3AiActionLoop/build.gradle
similarity index 94%
rename from core/pythonAction/build.gradle
rename to core/python3AiActionLoop/build.gradle
index f120d86..ad8d697 100644
--- a/core/pythonAction/build.gradle
+++ b/core/python3AiActionLoop/build.gradle
@@ -15,5 +15,5 @@
  * limitations under the License.
  */
 
-ext.dockerImageName = 'python3action'
+ext.dockerImageName = 'actionloop-python-v3.6-ai'
 apply from: '../../gradle/docker.gradle'
diff --git a/core/pythonActionLoop/pythonbuild.py.launcher.py 
b/core/python3AiActionLoop/lib/launcher.py
similarity index 92%
rename from core/pythonActionLoop/pythonbuild.py.launcher.py
rename to core/python3AiActionLoop/lib/launcher.py
index 360cb55..9773570 100755
--- a/core/pythonActionLoop/pythonbuild.py.launcher.py
+++ b/core/python3AiActionLoop/lib/launcher.py
@@ -39,18 +39,15 @@ except Exception:
   sys.exit(1)
 
 # now import the action as process input/output
-warnings.filterwarnings("ignore")
 from main__ import main as main
-warnings.resetwarnings()
 
-# if there are some arguments exit immediately
-if len(sys.argv) >1:
-  sys.stderr.flush()
-  sys.stdout.flush()
-  sys.exit(0)
+out = fdopen(3, "wb")
+if os.getenv("__OW_WAIT_FOR_ACK", "") != "":
+    out.write(json.dumps({"ok": True}, ensure_ascii=False).encode('utf-8'))
+    out.write(b'\n')
+    out.flush()
 
 env = os.environ
-out = fdopen(3, "wb")
 while True:
   line = stdin.readline()
   if not line: break
diff --git a/core/python3AiActionLoop/requirements.txt 
b/core/python3AiActionLoop/requirements.txt
new file mode 100644
index 0000000..44e5a9b
--- /dev/null
+++ b/core/python3AiActionLoop/requirements.txt
@@ -0,0 +1,44 @@
+# default available packages for python3aiaction
+beautifulsoup4 == 4.8.2
+httplib2 == 0.17.0
+kafka_python == 1.4.7
+lxml == 4.5.0
+python-dateutil == 2.8.1
+requests == 2.22.0
+scrapy == 1.8.0
+simplejson == 3.17.0
+virtualenv == 16.7.9
+twisted == 19.10.0
+netifaces == 0.10.9
+
+# package to sync from a variety of cloud blob storage
+python-rclone == 0.0.2
+
+# more ML/DL packages
+keras == 2.3.1
+opencv-contrib-python == 4.2.0.32
+Cython == 0.29.14
+tools == 0.1.9
+scikit-image == 0.16.2
+
+nltk == 3.4.5
+
+PyJWT == 1.7.1
+
+# packages for numerics
+numpy == 1.18.1
+scikit-learn == 0.22.1
+scipy == 1.4.1
+pandas == 1.0.1
+
+# packages for image processing
+Pillow == 7.0.0
+
+# Etc
+pymongo == 3.10.1
+redis == 3.4.1
+pika == 1.1.0
+elasticsearch == 7.5.1
+cassandra-driver == 3.21.0
+etcd3 == 0.11.1
+twilio == 6.35.4
diff --git a/core/python3AiActionLoop/samples/smart-body-crop/.gitignore 
b/core/python3AiActionLoop/samples/smart-body-crop/.gitignore
new file mode 100644
index 0000000..e5d0af6
--- /dev/null
+++ b/core/python3AiActionLoop/samples/smart-body-crop/.gitignore
@@ -0,0 +1,5 @@
+models
+action.zip
+action_package
+.ipynb_checkpoints
+__pycache__
diff --git a/core/python3AiActionLoop/samples/smart-body-crop/common.py 
b/core/python3AiActionLoop/samples/smart-body-crop/common.py
new file mode 100644
index 0000000..4c6ece1
--- /dev/null
+++ b/core/python3AiActionLoop/samples/smart-body-crop/common.py
@@ -0,0 +1,332 @@
+"""Executable Python script for running Python actions.
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+"""
+
+'''
+Some code is based on Ildoo Kim's code 
(https://github.com/ildoonet/tf-openpose) and 
https://gist.github.com/alesolano/b073d8ec9603246f766f9f15d002f4f4
+and derived from the OpenPose Library 
(https://github.com/CMU-Perceptual-Computing-Lab/openpose/blob/master/LICENSE)
+'''
+
+from collections import defaultdict
+from enum import Enum
+import math
+import numpy as np
+import itertools
+from scipy.ndimage.filters import maximum_filter
+from PIL import Image, ImageDraw
+
+
+class CocoPart(Enum):
+    Nose = 0
+    Neck = 1
+    RShoulder = 2
+    RElbow = 3
+    RWrist = 4
+    LShoulder = 5
+    LElbow = 6
+    LWrist = 7
+    RHip = 8
+    RKnee = 9
+    RAnkle = 10
+    LHip = 11
+    LKnee = 12
+    LAnkle = 13
+    REye = 14
+    LEye = 15
+    REar = 16
+    LEar = 17
+    Background = 18
+
+
+parts_dict = {'Nose': [0], 'Neck': [1], 'Shoulders': [2, 5], 'Elbows': [3, 6], 
'Wrists': [
+    4, 7], 'Hips': [8, 11], 'Knees': [9, 12], 'Ankles': [10, 13], 'Eyes': [14, 
15], 'Ears': [16, 17]}
+# 
parts_if_notfound_upper={'Eyes':'Ears','Ears':'Eyes','Nose':'Ears','Neck':'Nose','Shoulders':'Neck','Elbows':'Shoulders','Wrists':'Elbows','Hips':'Wrists#','Knees':'Hips'}
+# 
parts_if_notfound_lower=#{'Ears':'Nose','Nose':'Neck','Neck':'Shoulders','Shoulders':'Elbows','Elbows':'Wrists','Wrists':'Hips','Hips':'Knees',
+#                  'Knees':'Ankles','Ankles':'Knees'}
+body_parts_dict = {0: 'Eyes', 1: 'Ears', 2: 'Nose', 3: 'Neck', 4: 'Shoulders',
+                   5: 'Elbows', 6: 'Wrists', 7: 'Hips', 8: 'Knees', 9: 
'Ankles'}
+
+CocoPairs = [
+    (1, 2), (1, 5), (2, 3), (3, 4), (5, 6), (6,
+                                             7), (1, 8), (8, 9), (9, 10), (1, 
11),
+    (11, 12), (12, 13), (1, 0), (0, 14), (14,
+                                          16), (0, 15), (15, 17), (2, 16), (5, 
17)
+]   # = 19
+CocoPairsRender = CocoPairs[:-2]
+CocoPairsNetwork = [
+    (12, 13), (20, 21), (14, 15), (16, 17), (22,
+                                             23), (24, 25), (0, 1), (2, 3), 
(4, 5),
+    (6, 7), (8, 9), (10, 11), (28, 29), (30, 31), (34,
+                                                   35), (32, 33), (36, 37), 
(18, 19), (26, 27)
+]  # = 19
+
+CocoColors = [(255, 0, 0), (255, 85, 0), (255, 170, 0), (255, 255, 0), (170, 
255, 0), (85, 255, 0), (0, 255, 0),
+              (0, 255, 85), (0, 255, 170), (0, 255, 255), (0,
+                                                           170, 255), (0, 85, 
255), (0, 0, 255), (85, 0, 255),
+              (170, 0, 255), (255, 0, 255), (255, 0, 170), (255, 0, 85)]
+
+
+NMS_Threshold = 0.1
+InterMinAbove_Threshold = 6
+Inter_Threashold = 0.1
+Min_Subset_Cnt = 4
+Min_Subset_Score = 0.8
+Max_Human = 96
+
+
+def human_conns_to_human_parts(human_conns, heatMat):
+    human_parts = defaultdict(lambda: None)
+    for conn in human_conns:
+        human_parts[conn['partIdx'][0]] = (
+            conn['partIdx'][0],  # part index
+            (conn['coord_p1'][0] / heatMat.shape[2], conn['coord_p1']
+             [1] / heatMat.shape[1]),  # relative coordinates
+            heatMat[conn['partIdx'][0], conn['coord_p1']
+                    [1], conn['coord_p1'][0]]  # score
+        )
+        human_parts[conn['partIdx'][1]] = (
+            conn['partIdx'][1],
+            (conn['coord_p2'][0] / heatMat.shape[2],
+             conn['coord_p2'][1] / heatMat.shape[1]),
+            heatMat[conn['partIdx'][1], conn['coord_p2']
+                    [1], conn['coord_p2'][0]]
+        )
+    return human_parts
+
+
+def non_max_suppression(heatmap, window_size=3, threshold=NMS_Threshold):
+    heatmap[heatmap < threshold] = 0  # set low values to 0
+    part_candidates = heatmap * \
+        (heatmap == maximum_filter(heatmap, footprint=np.ones((window_size, 
window_size))))
+    return part_candidates
+
+
+def estimate_pose(heatMat, pafMat):
+    if heatMat.shape[2] == 19:
+        # transform from [height, width, n_parts] to [n_parts, height, width]
+        heatMat = np.rollaxis(heatMat, 2, 0)
+    if pafMat.shape[2] == 38:
+        # transform from [height, width, 2*n_pairs] to [2*n_pairs, height, 
width]
+        pafMat = np.rollaxis(pafMat, 2, 0)
+
+    # reliability issue.
+    heatMat = heatMat - heatMat.min(axis=1).min(axis=1).reshape(19, 1, 1)
+    heatMat = heatMat - heatMat.min(axis=2).reshape(19, heatMat.shape[1], 1)
+
+    _NMS_Threshold = max(np.average(heatMat) * 4.0, NMS_Threshold)
+    _NMS_Threshold = min(_NMS_Threshold, 0.3)
+
+    coords = []  # for each part index, it stores coordinates of candidates
+    for heatmap in heatMat[:-1]:  # remove background
+        part_candidates = non_max_suppression(heatmap, 5, _NMS_Threshold)
+        coords.append(np.where(part_candidates >= _NMS_Threshold))
+
+    # all connections detected. no information about what humans they belong to
+    connection_all = []
+    for (idx1, idx2), (paf_x_idx, paf_y_idx) in zip(CocoPairs, 
CocoPairsNetwork):
+        connection = estimate_pose_pair(
+            coords, idx1, idx2, pafMat[paf_x_idx], pafMat[paf_y_idx])
+        connection_all.extend(connection)
+
+    conns_by_human = dict()
+    for idx, c in enumerate(connection_all):
+        # at first, all connections belong to different humans
+        conns_by_human['human_%d' % idx] = [c]
+
+    no_merge_cache = defaultdict(list)
+    empty_set = set()
+    while True:
+        is_merged = False
+        for h1, h2 in itertools.combinations(conns_by_human.keys(), 2):
+            if h1 == h2:
+                continue
+            if h2 in no_merge_cache[h1]:
+                continue
+            for c1, c2 in itertools.product(conns_by_human[h1], 
conns_by_human[h2]):
+                # if two humans share a part (same part idx and coordinates), 
merge those humans
+                if set(c1['uPartIdx']) & set(c2['uPartIdx']) != empty_set:
+                    is_merged = True
+                    # extend human1 connections with human2 connections
+                    conns_by_human[h1].extend(conns_by_human[h2])
+                    conns_by_human.pop(h2)  # delete human2
+                    break
+            if is_merged:
+                no_merge_cache.pop(h1, None)
+                break
+            else:
+                no_merge_cache[h1].append(h2)
+
+        if not is_merged:  # if no more mergings are possible, then break
+            break
+
+    conns_by_human = {h: conns for (
+        h, conns) in conns_by_human.items() if len(conns) >= Min_Subset_Cnt}
+    conns_by_human = {h: conns for (h, conns) in conns_by_human.items() if max(
+        [conn['score'] for conn in conns]) >= Min_Subset_Score}
+
+    humans = [human_conns_to_human_parts(
+        human_conns, heatMat) for human_conns in conns_by_human.values()]
+    return humans
+
+
+def estimate_pose_pair(coords, partIdx1, partIdx2, pafMatX, pafMatY):
+    connection_temp = []  # all possible connections
+    peak_coord1, peak_coord2 = coords[partIdx1], coords[partIdx2]
+
+    for idx1, (y1, x1) in enumerate(zip(peak_coord1[0], peak_coord1[1])):
+        for idx2, (y2, x2) in enumerate(zip(peak_coord2[0], peak_coord2[1])):
+            score, count = get_score(x1, y1, x2, y2, pafMatX, pafMatY)
+            if (partIdx1, partIdx2) in [(2, 3), (3, 4), (5, 6), (6, 7)]:  # 
arms
+                if count < InterMinAbove_Threshold // 2 or score <= 0.0:
+                    continue
+            elif count < InterMinAbove_Threshold or score <= 0.0:
+                continue
+            connection_temp.append({
+                'score': score,
+                'coord_p1': (x1, y1),
+                'coord_p2': (x2, y2),
+                'idx': (idx1, idx2),  # connection candidate identifier
+                'partIdx': (partIdx1, partIdx2),
+                'uPartIdx': ('{}-{}-{}'.format(x1, y1, partIdx1), 
'{}-{}-{}'.format(x2, y2, partIdx2))
+            })
+
+    connection = []
+    used_idx1, used_idx2 = [], []
+    for conn_candidate in sorted(connection_temp, key=lambda x: x['score'], 
reverse=True):
+        if conn_candidate['idx'][0] in used_idx1 or conn_candidate['idx'][1] 
in used_idx2:
+            continue
+        connection.append(conn_candidate)
+        used_idx1.append(conn_candidate['idx'][0])
+        used_idx2.append(conn_candidate['idx'][1])
+
+    return connection
+
+
+def get_score(x1, y1, x2, y2, pafMatX, pafMatY):
+    num_inter = 10
+    dx, dy = x2 - x1, y2 - y1
+    normVec = math.sqrt(dx ** 2 + dy ** 2)
+
+    if normVec < 1e-4:
+        return 0.0, 0
+
+    vx, vy = dx / normVec, dy / normVec
+
+    xs = np.arange(
+        x1, x2, dx / num_inter) if x1 != x2 else np.full((num_inter, ), x1)
+    ys = np.arange(
+        y1, y2, dy / num_inter) if y1 != y2 else np.full((num_inter, ), y1)
+    xs = (xs + 0.5).astype(np.int8)
+    ys = (ys + 0.5).astype(np.int8)
+
+    # without vectorization
+    pafXs = np.zeros(num_inter)
+    pafYs = np.zeros(num_inter)
+    for idx, (mx, my) in enumerate(zip(xs, ys)):
+        pafXs[idx] = pafMatX[my][mx]
+        pafYs[idx] = pafMatY[my][mx]
+
+    local_scores = pafXs * vx + pafYs * vy
+    thidxs = local_scores > Inter_Threashold
+
+    return sum(local_scores * thidxs), sum(thidxs)
+
+
+def draw_humans(img1_raw, human_list):
+    img = np.asarray(img1_raw)
+    img_copied = np.copy(img)
+    image_h, image_w = img_copied.shape[:2]
+    centers = {}
+    c = 10
+    for human in human_list:
+        part_idxs = human.keys()
+
+        # draw point
+        draw = ImageDraw.Draw(img1_raw)
+        for i in range(CocoPart.Background.value):
+            if i not in part_idxs:
+                continue
+            part_coord = human[i][1]
+            center = (int(part_coord[0] * image_w + 0.5),
+                      int(part_coord[1] * image_h + 0.5))
+            centers[i] = center
+            bbox = (center[0] - c, center[1] - c, center[0] + c, center[1] + c)
+            draw.ellipse(bbox, fill=CocoColors[i])
+
+        # draw line
+        ctr = 1
+        for pair_order, pair in enumerate(CocoPairsRender):
+            if pair[0] not in part_idxs or pair[1] not in part_idxs:
+                continue
+            draw.line((centers[pair[0]][0], centers[pair[0]][1], 
centers[pair[1]]
+                       [0], centers[pair[1]][1]), fill=CocoColors[pair_order], 
width=5)
+    img1_raw = np.asarray(img1_raw)
+    del draw
+    return img1_raw
+
+
+def crop_image(img, humans_list, upper_body, lower_body):
+    upper_coord = 0.0
+    upper_coord_x = 0.0
+    lower_coord = 0.0
+    lower_coord_x = 0.0
+
+    img = np.asarray(img)
+    image_h, image_w = img.shape[:2]
+
+    if upper_body == 'Ankles' or lower_body == 'Eyes':
+        raise NameError('Body parts not consistent')
+
+    for human in humans_list:
+        parts = human.keys()
+        inte = parts_dict[upper_body]  # could be [1] or [2,3]
+
+        if upper_body == 'Nose' or upper_body == 'Neck':
+            upper_coord = human[inte[0]][1][1]  # interested only in heights.
+            upper_coord_x = human[inte[0]][1][0]
+        else:
+            upper_coord = (human[inte[0]][1][1] + human[inte[1]][1][1])/2
+            upper_coord_x = (human[inte[0]][1][0] + human[inte[1]][1][0])/2
+
+        inte = parts_dict[lower_body]
+        if lower_body == 'Nose' or lower_body == 'Neck':
+            lower_coord = human[inte[0]][1][1]  # interested only in heights.
+            lower_coord_x = human[inte[0]][1][0]
+        else:
+            lower_coord = (human[inte[0]][1][1] + human[inte[1]][1][1])/2
+            lower_coord_x = (human[inte[0]][1][0] + human[inte[1]][1][0])/2
+
+    image_h_u = int(upper_coord * image_h)
+    image_h_l = int(lower_coord * image_h)
+
+    image_w_left = int(upper_coord_x * image_w)
+    image_w_right = int(lower_coord_x * image_w)
+    aspect_ratio = image_h / image_w
+    image_w = int((image_w_left + image_w_right)/2)
+
+    img = img[image_h_u:image_h_l]
+    wid = int((img.shape[0]/aspect_ratio)/2)
+    img = img.transpose(1, 0, 2)
+    img = img[image_w-2*wid:image_w+2*wid]
+    img = img.transpose(1, 0, 2)
+
+    crop_position = (image_w-2*wid, image_h_u)
+    crop_size = (img.shape[1], img.shape[0])
+
+    return img, crop_position, crop_size
diff --git a/core/python3AiActionLoop/samples/smart-body-crop/crop.ipynb 
b/core/python3AiActionLoop/samples/smart-body-crop/crop.ipynb
new file mode 100644
index 0000000..3ea6633
--- /dev/null
+++ b/core/python3AiActionLoop/samples/smart-body-crop/crop.ipynb
@@ -0,0 +1,872 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# AI Action example: Smart Body Crop "
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "This notebook illustrates how an ML engineer develops an algorithm and 
deploys it in a serverless environment directly from the notebook itself. \n",
+    "\n",
+    "To make it faster to run, the training is skipped. This example reuses a 
pre-trained OpenPose model to identify a person in a picture, and then crops 
the body to highlight the desired clothing item.\n",
+    "\n",
+    "### Running the notebook locally\n",
+    "\n",
+    "Simply execute:\n",
+    "        \n",
+    "        $ docker run -it -p 8888:8888 -e OPENWHISK_AUTH=`cat ~/.wskprops 
| grep ^AUTH= | awk -F= '{print $2}'` -e OPENWHISK_APIHOST=`cat ~/.wskprops | 
grep ^APIHOST= | awk -F= '{print $2}'` --rm -v `pwd`:/notebooks/sf  
--entrypoint jupyter-notebook adobeapiplatform/openwhisk-python3aiaction:0.11.0 
 --notebook-dir=/notebooks --ip 0.0.0.0 --no-browser --allow-root\n",
+    "\n",
+    "> This command reads the local `~/.wskprops` and uses the Apache 
OpenWhisk credentials within."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from matplotlib import pyplot as plt\n",
+    "import matplotlib.patches as patches\n",
+    "\n",
+    "%matplotlib inline\n",
+    "from inference import SmartBodyCrop\n",
+    "from PIL import Image\n",
+    "import numpy as np"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#model_url = 
\"https://s3.amazonaws.com/rt-dev-public-models/openpose/2dw1oz9l9hi9avg/optimized_openpose.pb\"\n";,
+    "model_url = \"models/optimized_openpose.pb\"\n",
+    "inf = SmartBodyCrop(model_url = model_url)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Download the deep learning (open pose) model\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "  % Total    % Received % Xferd  Average Speed   Time    Time     Time  
Current\n",
+      "                                 Dload  Upload   Total   Spent    Left  
Speed\n",
+      "  0     0    0     0    0     0      0      0 --:--:-- --:--:-- 
--:--:--     0\n",
+      "  0     0    0     0    0     0      0      0 --:--:-- --:--:-- 
--:--:--     0\n",
+      "100  1204    0  1204    0     0    983      0 --:--:--  0:00:01 
--:--:-- 1175k\n",
+      "100  199M  100  199M    0     0  13.8M      0  0:00:14  0:00:14 
--:--:-- 19.5M\n"
+     ]
+    }
+   ],
+   "source": [
+    "!mkdir -p models\n",
+    "# Comment out the line below (model download) once you have it 
locally.\n",
+    "!curl -L https://www.dropbox.com/s/2dw1oz9l9hi9avg/optimized_openpose.pb 
-o models/optimized_openpose.pb"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 23,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "245\n"
+     ]
+    },
+    {
+     "data": {
+      "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAOAAAAEyCAYAAADjrNxxAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4wLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvqOYd8AAAIABJREFUeJzsvWmMpdlZJvicG3dfYs3IiMi1sjbXYuPyboQFRUMzNBpsDxbQRqLNDMJIuH+MhBCeXyCNWuIH02DRqC1bTbctD7SRcYMFlqGxaOzBdrnKlKtcpF1bZlZmRmVmrDfi7us3PyKfE+994z3f/W5UFb41k690db/lfGd/3u1sLooi3KE7dIe+P5T6fmfgDt2h/z/THQDeoTv0faQ7ALxDd+j7SHcAeIfu0PeR7gDwDt2h7yPdAeAdukPfR3rNAOic+0nn3LPOuReccx99rdK5
 [...]
+      "text/plain": [
+       "<Figure size 720x360 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "image = Image.open('fashion-men-1.jpg')\n",
+    "image.thumbnail( (368,368) )\n",
+    "print(image.size[0])\n",
+    "image = np.asarray(image)\n",
+    "plt.figure(figsize = (10,5))\n",
+    "plt.imshow(image)\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## How algorithm sees the body"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 18,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "image loaded in:      0.1135\n",
+      "Loading the model...\n",
+      "model imported in :     1.5586\n",
+      "tf session executed in:      5.4359\n",
+      "pose estimated in:      0.0048\n"
+     ]
+    },
+    {
+     "data": {
+      "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAOAAAAEyCAYAAADjrNxxAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4wLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvqOYd8AAAIABJREFUeJzsvWmQZNd1Jvbd3NfKWru7eu/GwgaanAYFbhIgCeDikRRkkBwZrSXCQ40miLFFWZKt8Ij8YVPhiJmQ7bHGntHEhKkwNaRojgTQJqUQGRQXU9JoOFxAgqBALASIbnR3dXdV15JVWZV75vOPrHPrvJPn3vdedYNMhPtEZOR799393u+cc8/dTBAEuE236Tb9aCj1o87AbbpN/3+m2wC8TbfpR0i3AXibbtOPkG4D8Dbdph8h3QbgbbpNP0K6DcDbdJt+hPSKAdAY8zPGmOeN
 [...]
+      "text/plain": [
+       "<Figure size 720x360 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "x = inf.detect_parts('fashion-men-1.jpg')\n",
+    "#x = 
inf.detect_parts('https://cdn.shopify.com/s/files/1/1970/6605/products/Pioneer-Camp-2017-spring-new-fashion-men-shirt-long-sleeve-brand-clothing-quality-cotton-soft-shirt_e262fa2c-a279-4190-9cf7-707982189e9e.jpg?v=1501310825')\n",
+    "plt.figure(figsize=(10,5))\n",
+    "plt.imshow(x)\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Bodycrop based on detected body parts"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 20,
+   "metadata": {
+    "scrolled": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "image (3840x5760) loaded in:      0.1047\n",
+      "Loading the model...\n",
+      "model imported in :     1.4202\n",
+      "tf session executed in:      5.4498\n",
+      "pose estimated in:      0.0051\n",
+      "image cropped in:      0.0002\n"
+     ]
+    },
+    {
+     "data": {
+      "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAUEAAAD8CAYAAADpLRYuAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4wLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvqOYd8AAAIABJREFUeJztnXuwbVdV5n+TG94Y8uTm5t48LiRFKjwSrolEiYAGFWyUUCKiFk0LXfnHRrSsUmj/sLrKKttqS6CraLujIHRXikdDeAi0PCKUUqUhSScGSEhIbl73ksclD0BQIbL6j3O+s8ceZ6x55lp7n3327T2+qlNn773WmnOuudZe+xtjfGPM0nUdiUQisap4zE4PIJFIJHYS+RBMJBIrjXwIJhKJlUY+BBOJxEojH4KJRGKlkQ/BRCKx0siHYCKRWGnM9BAspby0lHJLKeW2Usqb
 [...]
+      "text/plain": [
+       "<Figure size 576x288 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "CPU times: user 8.79 s, sys: 1.87 s, total: 10.7 s\n",
+      "Wall time: 5.78 s\n"
+     ]
+    }
+   ],
+   "source": [
+    "%%time\n",
+    "x, coordinates, imgpath = inf.infer('fashion-men-1.jpg','Eyes','Hips')\n",
+    "plt.figure(figsize = (8,4))\n",
+    "plt.imshow(x)\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Draw the crop coordinates on the original image"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 22,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAZwAAAJCCAYAAAD0nXH7AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4wLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvqOYd8AAAIABJREFUeJzsvUtzXNl17/k/efKdCeQDCYB4EeCbVWSV6mFVWQ7Zcjh05Q5PenpveODZjXBEf4D+Av0l7syzjh560OEeeOCwwrYsyVKpqshiFd8EQbwTyAfynacHqd/KladQKqo7usRw40QwSAKZ5+yz93r813+tvXYQRZEurovr4rq4Lq6L6//rK/GHHsDFdXFdXBfXxfX/j+vC4VxcF9fFdXFdXN/JdeFwLq6L6+K6uC6u7+S6cDgX18V1cV1cF9d3cl04nIvr4rq4Lq6L6zu5LhzO
 [...]
+      "text/plain": [
+       "<Figure size 1008x720 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "img = Image.open('fashion-men-1.jpg')\n",
+    "\n",
+    "# Create figure and axes\n",
+    "fig,ax = plt.subplots(1,figsize=(14,10))\n",
+    "ax.imshow(img)\n",
+    "\n",
+    "# Create a Rectangle patch\n",
+    "rect = patches.Rectangle(\n",
+    "    (coordinates.get('x'),coordinates.get('y')),\n",
+    "    coordinates.get('width'),coordinates.get('height'),\n",
+    "    linewidth = 3, \n",
+    "    edgecolor = 'r',\n",
+    "    facecolor = 'none')\n",
+    "\n",
+    "# Add the patch to the Axes\n",
+    "ax.add_patch(rect)\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Test with a remote image "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 24,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "image downloaded in :     0.3233\n",
+      "image (2192x4299) loaded in:      0.4292\n",
+      "Loading the model...\n",
+      "model imported in :     1.6012\n",
+      "tf session executed in:      5.5260\n",
+      "pose estimated in:      0.0030\n",
+      "image cropped in:      0.0001\n"
+     ]
+    },
+    {
+     "data": {
+      "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAQUAAAD8CAYAAAB+fLH0AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4wLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvqOYd8AAAIABJREFUeJzsvVvMbUt2HvRV1byuy3/b971Pn3O63afdbZu0jU0sgRRFWCgQLIwQskiQ5YClfgIFAcIOT7wgmRfA4iFSi4CMFCkJYGEeokTIOFEcQtPdtsGOu0/36XPfl7Mv/21d56Vq8FBjVNVc//r35VzaO9Ia0t5r/WvOWbPmnDWrxvjGN8ZQRISd7GQnOxHRf9od2MlOdvJyyW5S2MlOdjKQ3aSwk53sZCC7SWEnO9nJQHaTwk52spOB7CaFnexkJwP5zCYFpdS/qpR6Uyn1llLq
 [...]
+      "text/plain": [
+       "<Figure size 576x288 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "# 
https://i.pinimg.com/736x/eb/61/fa/eb61fa047dcd0a20001392c13da93709--mens-fashion-blog-mens-fashion-styles.jpg\n";,
+    "# 2192x4299 - 
https://i.pinimg.com/originals/9c/96/87/9c968732595e965619ef7b0b7e4807e0.jpg\n";,
+    "x, coordinates, imgpath = 
inf.infer('https://i.pinimg.com/originals/9c/96/87/9c968732595e965619ef7b0b7e4807e0.jpg',\n",
+    "                           'Eyes',\n",
+    "                           'Hips')\n",
+    "plt.figure(figsize = (8,4))\n",
+    "plt.imshow(x)\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Deploy the algorithm as a function"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## 1. Write the function\n",
+    "\n",
+    "The function expects the following parameters as input:\n",
+    "* `model_url` - the location of the model\n",
+    "* `image` - the image location. It can be given as a request object, or a 
location string (provided no extra authorization headers are required to read 
the file). I.e. \n",
+    "  ```javascript\n",
+    "  {\n",
+    "      uri: \"https://...\",\n";,
+    "      headers: {\n",
+    "          \"Authorization\": \"Bearer ...\",\n",
+    "      }\n",
+    "  }\n",
+    "  ```\n",
+    "* `from_upper` - the upper part of the body to start the crop from. I.e. 
_Eyes_, _Nose_, _Neck_\n",
+    "* `to_lower` - the lower part of the body to stop the crop at. I.e. 
_Hip_, _Knees_, _Ankles_\n",
+    "\n",
+    "For flexibility, this function returns only the information needed to 
crop the body. I.e. \n",
+    "```javascript\n",
+    "{\n",
+    "  X: 100,\n",
+    "  Y: 100,\n",
+    "  W: 200,\n",
+    "  H: 100\n",
+    "}\n",
+    "```\n",
+    "\n",
+    "On the premise that the cropped image may exceed the max response size of 
an action, the actual cropping may be performed by another action, which should 
upload the cropped image to a blob storage. Bellow is the code that can crop 
the image based on the coordinates \n",
+    "\n",
+    "```python\n",
+    "from PIL import Image\n",
+    "import os\n",
+    "\n",
+    "img_crop = Image.open(local_image_path)\n",
+    "\n",
+    "img_crop = img_crop.crop(\n",
+    "    (coordinates.get('X'),                              # left \n",
+    "     coordinates.get('Y'),                              # upper\n",
+    "     coordinates.get('X') + coordinates.get('W'),       # right\n",
+    "     coordinates.get('Y') + coordinates.get('H')))      # lower\n",
+    "     \n",
+    "img_crop_filename = (os.environ.get('__OW_ACTIVATION_ID') or '_local') + 
\".jpg\"\n",
+    "img_crop_path = '/tmp/' + img_crop_filename\n",
+    "\n",
+    "img_crop.save(img_crop_path, \"JPEG\", optimize=True)\n",
+    "\n",
+    "print(\"The cropped image has been saved in:\", img_crop_path)\n",
+    "```"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 25,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Overwriting smart_body_crop.py\n"
+     ]
+    }
+   ],
+   "source": [
+    "%%writefile smart_body_crop.py\n",
+    "\n",
+    "from inference import SmartBodyCrop\n",
+    "import os\n",
+    "\n",
+    "def action_handler(args):\n",
+    "    print(args)\n",
+    "    model_url = args.get('model_url')\n",
+    "    \n",
+    "    body_crop = SmartBodyCrop(model_url = model_url)\n",
+    "    print(\"SmartBodyCrop.initialized=\", SmartBodyCrop.initialized)\n",
+    "    \n",
+    "    crop_img, crop_coordinates, local_image_path = body_crop.infer(\n",
+    "                    args.get('image'), \n",
+    "                    args.get('from_upper'), \n",
+    "                    args.get('to_lower'))\n",
+    "    \n",
+    "    # if you want to crop the image, you can insert the code demonstrated 
above\n",
+    "    # then return the image as a base64 encoded string in the response 
body\n",
+    "    \n",
+    "    return {\n",
+    "        'X': crop_coordinates.get('x'),\n",
+    "        'Y': crop_coordinates.get('y'),\n",
+    "        'W': crop_coordinates.get('width'),\n",
+    "        'H': crop_coordinates.get('height')\n",
+    "    }\n",
+    "    "
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Test the function locally"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 26,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "{'model_url': 'models/optimized_openpose.pb', 'image': 
'https://i.pinimg.com/originals/9c/96/87/9c968732595e965619ef7b0b7e4807e0.jpg', 
'from_upper': 'Eyes', 'to_lower': 'Elbows'}\n",
+      "SmartBodyCrop.initialized= False\n",
+      "image downloaded in :     0.1114\n",
+      "image (2192x4299) loaded in:      0.2350\n",
+      "Loading the model...\n",
+      "model imported in :     1.5238\n",
+      "tf session executed in:      5.0700\n",
+      "pose estimated in:      0.0056\n",
+      "image cropped in:      0.0002\n",
+      "{'H': 1028.0217391304348, 'Y': 467.2826086956522, 'W': 
1031.5294117647059, 'X': 550.9304812834225}\n",
+      "CPU times: user 6.18 s, sys: 1.88 s, total: 8.06 s\n",
+      "Wall time: 5.35 s\n"
+     ]
+    }
+   ],
+   "source": [
+    "%%time\n",
+    "from smart_body_crop import action_handler\n",
+    "action_response = action_handler({ \n",
+    "    'model_url': model_url,\n",
+    "    'image': 
\"https://i.pinimg.com/originals/9c/96/87/9c968732595e965619ef7b0b7e4807e0.jpg\",\n";,
+    "    'from_upper': 'Eyes',\n",
+    "    'to_lower': 'Elbows'})\n",
+    "\n",
+    "print(action_response)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Verify that the returned coordinates are correct"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 27,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "image downloaded in :     0.1307\n",
+      "(2192, 4299)\n"
+     ]
+    },
+    {
+     "data": {
+      "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAANkAAADGCAYAAABfPiU4AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4wLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvqOYd8AAAIABJREFUeJzsvdmvbNl93/dZa+255jrzPXfq2+yBTYqjJWowHUGSJVuwID8EgoIgsIMAfkj8ECAPcf4DI0gCGwkgQEEerCSIbcEZnEhJ4EgiZIkmKVFmk+xu9u3ue2/f6cx1atzjGvKwa9epc/uSajbFVoO4P6Bw6uzau2rvtdZv/v5+SzjneEbP6Bn98Ej+Zd/AM3pGP+r0jMme0TP6IdMzJntGz+iHTM+Y7Bk9ox8yPWOyZ/SMfsj0jMme0TP6IdOHzmRCiL8hhHhTCPG2EOIffNi/
 [...]
+      "text/plain": [
+       "<Figure size 504x216 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "imgpath = 
inf._download_image('https://i.pinimg.com/originals/9c/96/87/9c968732595e965619ef7b0b7e4807e0.jpg')\n",
+    "image = Image.open(imgpath)\n",
+    "print(image.size)\n",
+    "\n",
+    "img_crop = image.crop(\n",
+    "    (action_response.get('X'),                              # left \n",
+    "     action_response.get('Y'),                              # upper\n",
+    "     action_response.get('X') + action_response.get('W'),       # 
right\n",
+    "     action_response.get('Y') + action_response.get('H')))      # 
lower\n",
+    "\n",
+    "image = np.asarray(img_crop)\n",
+    "plt.figure(figsize = (7,3))\n",
+    "plt.imshow(image)\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## 2. Configure Apache OpenWhisk as the FaaS Provider "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 28,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Installing wsk CLI ...\n",
+      "wsk\n",
+      "NOTICE.txt\n",
+      "README.md\n",
+      "LICENSE.txt\n"
+     ]
+    },
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "  % Total    % Received % Xferd  Average Speed   Time    Time     Time  
Current\n",
+      "                                 Dload  Upload   Total   Spent    Left  
Speed\n",
+      "\r",
+      "  0     0    0     0    0     0      0      0 --:--:-- --:--:-- 
--:--:--     0\r",
+      "  0     0    0     0    0     0      0      0 --:--:-- --:--:-- 
--:--:--     0\r",
+      "100   626    0   626    0     0   1501      0 --:--:-- --:--:-- 
--:--:--  1538\n",
+      "\r",
+      "  3 3845k    3  135k    0     0   115k      0  0:00:33  0:00:01  
0:00:32  115k\r",
+      " 58 3845k   58 2260k    0     0  1050k      0  0:00:03  0:00:02  
0:00:01 2172k\r",
+      "100 3845k  100 3845k    0     0  1344k      0  0:00:02  0:00:02 
--:--:-- 2202k\n"
+     ]
+    }
+   ],
+   "source": [
+    "%%bash\n",
+    "which wsk && exit\n",
+    "echo \"Installing wsk CLI ...\"\n",
+    "curl -L 
https://github.com/apache/incubator-openwhisk-cli/releases/download/latest/OpenWhisk_CLI-latest-linux-amd64.tgz
 -o /tmp/wsk.tgz \n",
+    "tar xvfz /tmp/wsk.tgz -C /tmp/\n",
+    "mv /tmp/wsk /usr/local/bin"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Configure Apache OpenWhisk credentials\n",
+    "\n",
+    "Use `OPENWHISK_AUTH` and `OPENWHISK_APIHOST` environment variables."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 29,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from pathlib import Path\n",
+    "import os\n",
+    "home = str(Path.home())\n",
+    "file = open(home + \"/.wskprops\",\"w\") \n",
+    "file.write('AUTH=' + os.environ.get('OPENWHISK_AUTH') + \"\\n\")\n",
+    "file.write('APIHOST=' + os.environ.get('OPENWHISK_APIHOST') + \"\\n\")\n",
+    "file.close()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## 3. Deploy the function"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The function must ZIP the other dependent python scripts used to train 
the model. The action code must be placed in a file called `__main__.py`."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 30,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "  adding: smart_body_crop.py (deflated 58%)\n",
+      "  adding: common.py (deflated 68%)\n",
+      "  adding: inference.py (deflated 73%)\n",
+      "  adding: __main__.py (deflated 58%)\n"
+     ]
+    }
+   ],
+   "source": [
+    "%%bash\n",
+    "mkdir -p action_package\n",
+    "\n",
+    "cp smart_body_crop.py action_package/__main__.py\n",
+    "cp *.py action_package/\n",
+    "cd action_package && zip -9 -r ../action.zip ./"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 31,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "model_url = 
\"https://s3.amazonaws.com/rt-dev-public-models/openpose/2dw1oz9l9hi9avg/optimized_openpose.pb\"";
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 32,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "ok: updated action smart_body_crop\n"
+     ]
+    }
+   ],
+   "source": [
+    "%%bash -s \"$model_url\"\n",
+    "\n",
+    "wsk action update smart_body_crop action.zip --main action_handler  \\\n",
+    "    --param model_url \"$1\" \\\n",
+    "    --param from_upper Eyes \\\n",
+    "    --param to_lower Hips \\\n",
+    "    --memory 3891 \\\n",
+    "    --docker adobeapiplatform/openwhisk-python3aiaction:0.11.0"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 33,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# !wsk action get smart_body_crop"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## 4. Invoke the function"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 34,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "\u001b[32mok:\u001b[0m invoked 
/\u001b[1m_\u001b[0m/\u001b[1msmart_body_crop\u001b[0m with id 
\u001b[1m6c1536170686492a9536170686692a59\u001b[0m\r\n"
+     ]
+    }
+   ],
+   "source": [
+    "!wsk action invoke smart_body_crop --param image 
\"https://i.pinimg.com/originals/9c/96/87/9c968732595e965619ef7b0b7e4807e0.jpg\";
 \\\n",
+    "  --param from_upper Eyes --param to_lower Elbows\n",
+    "# !wsk action invoke smart_body_crop --param image 
\"https://i.pinimg.com/236x/17/1c/a6/171ca6b06111529aa6f10b1f4e418339--style-men-my-style.jpg\";
 \\\n",
+    "#   --param from_upper Eyes --param to_lower Elbows"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Note on first run \n",
+    "On initial run the function has to:\n",
+    "* download the model\n",
+    "* initialize tensorflow \n",
+    "\n",
+    "These steps will take a few seconds."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 35,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "\u001b[32mok:\u001b[0m got activation 
\u001b[1m6c1536170686492a9536170686692a59\u001b[0m\r\n",
+      "{\r\n",
+      "    \"namespace\": \"bladerunner-test\",\r\n",
+      "    \"name\": \"smart_body_crop\",\r\n",
+      "    \"version\": \"0.0.20\",\r\n",
+      "    \"subject\": \"bladerunner-test\",\r\n",
+      "    \"activationId\": \"6c1536170686492a9536170686692a59\",\r\n",
+      "    \"start\": 1545333346300,\r\n",
+      "    \"end\": 1545333353046,\r\n",
+      "    \"duration\": 6746,\r\n",
+      "    \"response\": {\r\n",
+      "        \"status\": \"success\",\r\n",
+      "        \"statusCode\": 0,\r\n",
+      "        \"success\": true,\r\n",
+      "        \"result\": {\r\n",
+      "            \"H\": 1028.0217391304348,\r\n",
+      "            \"W\": 1031.5294117647059,\r\n",
+      "            \"X\": 550.9304812834225,\r\n",
+      "            \"Y\": 467.2826086956522\r\n",
+      "        }\r\n",
+      "    },\r\n",
+      "    \"logs\": [],\r\n",
+      "    \"annotations\": [\r\n",
+      "        {\r\n",
+      "            \"key\": \"path\",\r\n",
+      "            \"value\": \"bladerunner-test/smart_body_crop\"\r\n",
+      "        },\r\n",
+      "        {\r\n",
+      "            \"key\": \"waitTime\",\r\n",
+      "            \"value\": 2787\r\n",
+      "        },\r\n",
+      "        {\r\n",
+      "            \"key\": \"kind\",\r\n",
+      "            \"value\": \"blackbox\"\r\n",
+      "        },\r\n",
+      "        {\r\n",
+      "            \"key\": \"limits\",\r\n",
+      "            \"value\": {\r\n",
+      "                \"concurrency\": 1,\r\n",
+      "                \"logs\": 10,\r\n",
+      "                \"memory\": 3891,\r\n",
+      "                \"timeout\": 60000\r\n",
+      "            }\r\n",
+      "        },\r\n",
+      "        {\r\n",
+      "            \"key\": \"initTime\",\r\n",
+      "            \"value\": 19\r\n",
+      "        }\r\n",
+      "    ],\r\n",
+      "    \"publish\": false\r\n",
+      "}\r\n"
+     ]
+    }
+   ],
+   "source": [
+    "!wsk activation get 6c1536170686492a9536170686692a59"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Invoke the action again\n",
+    "\n",
+    "This time it should respond much faster as it has been pre-warmed."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 36,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "\u001b[32mok:\u001b[0m invoked 
/\u001b[1m_\u001b[0m/\u001b[1msmart_body_crop\u001b[0m with id 
\u001b[1mfd729b5d47e6415ab29b5d47e6915aa9\u001b[0m\n",
+      "{\n",
+      "    \"activationId\": \"fd729b5d47e6415ab29b5d47e6915aa9\",\n",
+      "    \"annotations\": [\n",
+      "        {\n",
+      "            \"key\": \"limits\",\n",
+      "            \"value\": {\n",
+      "                \"concurrency\": 1,\n",
+      "                \"logs\": 10,\n",
+      "                \"memory\": 3891,\n",
+      "                \"timeout\": 60000\n",
+      "            }\n",
+      "        },\n",
+      "        {\n",
+      "            \"key\": \"path\",\n",
+      "            \"value\": \"bladerunner-test/smart_body_crop\"\n",
+      "        },\n",
+      "        {\n",
+      "            \"key\": \"kind\",\n",
+      "            \"value\": \"blackbox\"\n",
+      "        },\n",
+      "        {\n",
+      "            \"key\": \"waitTime\",\n",
+      "            \"value\": 6\n",
+      "        }\n",
+      "    ],\n",
+      "    \"duration\": 2160,\n",
+      "    \"end\": 1545333364902,\n",
+      "    \"logs\": [],\n",
+      "    \"name\": \"smart_body_crop\",\n",
+      "    \"namespace\": \"bladerunner-test\",\n",
+      "    \"publish\": false,\n",
+      "    \"response\": {\n",
+      "        \"result\": {\n",
+      "            \"H\": 1028.0217391304348,\n",
+      "            \"W\": 1031.5294117647059,\n",
+      "            \"X\": 550.9304812834225,\n",
+      "            \"Y\": 467.2826086956522\n",
+      "        },\n",
+      "        \"status\": \"success\",\n",
+      "        \"success\": true\n",
+      "    },\n",
+      "    \"start\": 1545333362742,\n",
+      "    \"subject\": \"bladerunner-test\",\n",
+      "    \"version\": \"0.0.20\"\n",
+      "}\n",
+      "CPU times: user 100 ms, sys: 140 ms, total: 240 ms\n",
+      "Wall time: 3.42 s\n"
+     ]
+    }
+   ],
+   "source": [
+    "%%time\n",
+    "!wsk action invoke smart_body_crop --param image 
\"https://i.pinimg.com/originals/9c/96/87/9c968732595e965619ef7b0b7e4807e0.jpg\";
 \\\n",
+    "  --param from_upper Eyes --param to_lower Elbows -b"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.5.2"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/core/python3AiActionLoop/samples/smart-body-crop/fashion-men-1.jpg 
b/core/python3AiActionLoop/samples/smart-body-crop/fashion-men-1.jpg
new file mode 100644
index 0000000..8d440e3
Binary files /dev/null and 
b/core/python3AiActionLoop/samples/smart-body-crop/fashion-men-1.jpg differ
diff --git a/core/python3AiActionLoop/samples/smart-body-crop/inference.py 
b/core/python3AiActionLoop/samples/smart-body-crop/inference.py
new file mode 100644
index 0000000..dbb5f87
--- /dev/null
+++ b/core/python3AiActionLoop/samples/smart-body-crop/inference.py
@@ -0,0 +1,246 @@
+"""Executable Python script for running Python actions.
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+"""
+
+'''
+Some is based on Ildoo Kim's code (https://github.com/ildoonet/tf-openpose) 
and https://gist.github.com/alesolano/b073d8ec9603246f766f9f15d002f4f4
+and derived from the OpenPose Library 
(https://github.com/CMU-Perceptual-Computing-Lab/openpose/blob/master/LICENSE)
+'''
+
+import tensorflow as tf
+import numpy as np
+from PIL import Image
+from tensorflow.core.framework import graph_pb2
+import urllib3
+import certifi
+import os
+import shutil
+
+from common import estimate_pose, crop_image, draw_humans
+
+import time
+
+
+def print_time(message, start):
+    print(message, "{:10.4f}".format(time.time() - start))
+    return time.time()
+
+
+class SmartBodyCrop:
+    initialized = False
+    tmp_path = '/tmp/'
+    tmpfs_path = '/mnt/action/'
+
+    def __init__(self, model_url):
+        self.model_url = model_url
+
+    def read_img(self, imgpath, width, height):
+        img = Image.open(imgpath)
+        orig_width, orig_height = img.size
+        # resize the image to match openpose's training data
+        # https://github.com/ildoonet/tf-pose-estimation#inference-time
+        img.thumbnail((width, height))
+        thumbnail_w, thumbnail_h = img.size
+        #val_img = val_img.resize((width, height))
+        val_img = np.asarray(img, dtype=np.float32)
+        val_img = val_img.reshape([1, thumbnail_h, thumbnail_w, 3])
+        # val_img = val_img.astype(float)
+        val_img = val_img * (2.0 / 255.0) - 1.0  # normalization
+
+        return val_img, img, orig_width, orig_height
+
+    def _download_model(self):
+        # check if the model is a ref to local file path
+        if type(self.model_url) is str:
+            if not self.model_url.startswith('http'):
+                return self.model_url
+
+        start = time.time()
+        local_model_path = SmartBodyCrop.tmp_path + 'optimized_openpose.pb'
+        tmpfs_model_path = SmartBodyCrop.tmpfs_path + 'optimized_openpose.pb'
+
+        if (os.path.isfile(local_model_path)):
+            print_time("model was found in the local storage: " +
+                       local_model_path, start)
+            return local_model_path
+
+        # check if this model was downloaded by another invocation in the 
tmpfs path
+        if (os.path.isfile(tmpfs_model_path)):
+            print_time("model was found in the tmpfs storage: " +
+                       tmpfs_model_path, start)
+            shutil.copy(tmpfs_model_path, local_model_path)
+            print_time("model copied FROM tmpfs:" + tmpfs_model_path, start)
+            return local_model_path
+
+        http = urllib3.PoolManager(
+            cert_reqs='CERT_REQUIRED',
+            ca_certs=certifi.where(),
+            headers={
+                'Accept': 'application/octet-stream',
+                'Content-Type': 'application/octet-stream'
+            })
+        urllib3.disable_warnings()
+
+        r = http.request('GET', self.model_url,
+                         preload_content=False,
+                         retries=urllib3.Retry(5, redirect=5))
+
+        with open(local_model_path, 'wb') as out:
+            while True:
+                data = r.read(8192)  # 64 # 8192
+                if not data:
+                    break
+                out.write(data)
+
+        r.release_conn()
+        print_time("model downloaded in :", start)
+
+        # copy the file to the tmpfs_model_path to be reused by other actions
+        # this seems to work concurrently as per: 
https://stackoverflow.com/questions/35605463/why-is-concurrent-copy-of-a-file-not-failing
+        if (os.path.isdir(SmartBodyCrop.tmpfs_path)):
+            shutil.copy(local_model_path, tmpfs_model_path)
+            print_time("model copied to tmpfs:" + tmpfs_model_path, start)
+
+        return local_model_path
+
+    def _download_image(self, image):
+        start = time.time()
+        headers = {}
+        image_url = image
+        local_image_path = SmartBodyCrop.tmp_path + 'image'
+        if type(image) is dict:
+            headers = image.get('headers')
+            image_url = image.get('uri')
+        # check if the image is a local file path
+        if type(image) is str:
+            if not image.startswith('http'):
+                return image
+
+        http = urllib3.PoolManager(
+            cert_reqs='CERT_REQUIRED',
+            ca_certs=certifi.where(),
+            headers=headers)
+        urllib3.disable_warnings()
+
+        r = http.request('GET', image_url,
+                         preload_content=False,
+                         retries=urllib3.Retry(5, redirect=5))
+
+        with open(local_image_path, 'wb') as out:
+            while True:
+                data = r.read(1024)  # 8192
+                if not data:
+                    break
+                out.write(data)
+
+        r.release_conn()
+        print_time("image downloaded in :", start)
+        return local_image_path
+
+    def load_graph_def(self):
+        start = time.time()
+
+        local_model_path = self._download_model()
+
+        tf.reset_default_graph()
+        graph_def = graph_pb2.GraphDef()
+        with open(local_model_path, 'rb') as f:
+            graph_def.ParseFromString(f.read())
+        tf.import_graph_def(graph_def, name='')
+
+        start = print_time("model imported in :", start)
+        start = time.time()
+
+        # SmartBodyCrop.initialized = True
+
+    def infer(self, image, upper_body, lower_body):
+        start = time.time()
+
+        imgpath = self._download_image(image)
+        image, thumbnail, input_width, input_height = self.read_img(
+            imgpath, 368, 368)
+        start = print_time("image (" + str(input_width) +
+                           "x" + str(input_height) + ") loaded in: ", start)
+
+        if not SmartBodyCrop.initialized:
+            print("Loading the model...")
+            self.load_graph_def()
+
+        with tf.Session() as sess:
+            inputs = tf.get_default_graph().get_tensor_by_name('inputs:0')
+            heatmaps_tensor = tf.get_default_graph().get_tensor_by_name(
+                'Mconv7_stage6_L2/BiasAdd:0')
+            pafs_tensor = tf.get_default_graph().get_tensor_by_name(
+                'Mconv7_stage6_L1/BiasAdd:0')
+
+            heatMat, pafMat = sess.run(
+                [heatmaps_tensor, pafs_tensor], feed_dict={inputs: image})
+
+            start = print_time("tf session executed in: ", start)
+
+            humans = estimate_pose(heatMat[0], pafMat[0])
+            start = print_time("pose estimated in: ", start)
+            # send the thumbnail to render an initial crop
+            img, crop_position, crop_size = crop_image(
+                thumbnail, humans, upper_body, lower_body)
+            # scale back the crop_coordinates to match the original picture 
size
+            scale_factor_w = input_width / thumbnail.size[0]
+            scale_factor_h = input_height / thumbnail.size[1]
+            crop_coordinates = {
+                'x':      crop_position[0] * scale_factor_w,
+                'y':      crop_position[1] * scale_factor_h,
+                'width':  crop_size[0] * scale_factor_w,
+                'height': crop_size[1] * scale_factor_h
+            }
+
+            start = print_time("image cropped in: ", start)
+
+            sess.close()
+            return img, crop_coordinates, imgpath
+
+    def detect_parts(self, image):
+        start = time.time()
+
+        imgpath = self._download_image(image)
+        image, thumbnail, input_width, input_height = self.read_img(
+            imgpath, 368, 368)
+        start = print_time("image loaded in: ", start)
+
+        if not SmartBodyCrop.initialized:
+            print("Loading the model...")
+            self.load_graph_def()
+
+        with tf.Session() as sess:
+            inputs = tf.get_default_graph().get_tensor_by_name('inputs:0')
+            heatmaps_tensor = tf.get_default_graph().get_tensor_by_name(
+                'Mconv7_stage6_L2/BiasAdd:0')
+            pafs_tensor = tf.get_default_graph().get_tensor_by_name(
+                'Mconv7_stage6_L1/BiasAdd:0')
+
+            heatMat, pafMat = sess.run(
+                [heatmaps_tensor, pafs_tensor], feed_dict={inputs: image})
+
+            start = print_time("tf session executed in: ", start)
+
+            humans = estimate_pose(heatMat[0], pafMat[0])
+            start = print_time("pose estimated in: ", start)
+
+            # display
+            img1 = draw_humans(thumbnail, humans)
+            return img1
diff --git a/core/pythonActionLoop/pythonbuild.py 
b/core/pythonActionLoop/pythonbuild.py
deleted file mode 100755
index b064f2d..0000000
--- a/core/pythonActionLoop/pythonbuild.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/env python3
-"""Python Action Compiler
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""
-
-from __future__ import print_function
-import os
-import sys
-import codecs
-import subprocess
-
-
-def copy(src, dst):
-    with codecs.open(src, 'r', 'utf-8') as s:
-        body = s.read()
-        with codecs.open(dst, 'w', 'utf-8') as d:
-            d.write(body)
-
-# if there is an exec copy to main__.py
-# else if there is a __main__.py copy to main__.py
-# (exec prevails over __main__.py)
-# then copy the launcher in exec__.py replacing the main function
-def sources(launcher, source_dir, main):
-    # source and dest
-    src = "%s/exec" % source_dir
-    dst = "%s/main__.py" % source_dir
-    # copy exec to main__.py
-    if os.path.isfile(src):
-        copy(src,dst)
-    else:
-        # renaming __main__ to main__
-        src = "%s/__main__.py" % source_dir
-        if os.path.isfile(src):
-            copy(src, dst)
-
-    # copy a launcher
-    starter = "%s/exec__.py" % source_dir
-    with codecs.open(launcher, 'r', 'utf-8') as s:
-        with codecs.open(starter, 'w', 'utf-8') as d:
-            body = s.read()
-            body = body.replace("from main__ import main as main",
-                                "from main__ import %s as main" % main)
-            d.write(body)
-    return starter
-
-# build the launcher but only if there is the main
-def build(source_dir, target_file, launcher):
-    main = "%s/main__.py" % source_dir
-    cmd = "#!/bin/bash"
-    if os.path.isfile(main):
-        cmd += """
-cd %s
-exec "$(which python)" %s "$@"
-""" % (source_dir, launcher)
-    else:
-        cmd += """
-echo "Zip file does not include mandatory files."
-"""
-    with codecs.open(target_file, 'w', 'utf-8') as d:
-        d.write(cmd)
-    os.chmod(target_file, 0o755)
-
-def compile(argv):
-    if len(argv) < 4:
-        sys.stdout.write("usage: <main-function> <source-dir> <target-dir>\n")
-        sys.exit(1)
-
-    main = argv[1]
-    source_dir = os.path.abspath(argv[2])
-    target_file = os.path.abspath("%s/exec" % argv[3])
-    launcher = os.path.abspath(argv[0]+".launcher.py")
-    starter = sources(launcher, source_dir, main)
-    build(source_dir, target_file, starter)
-    sys.stdout.flush()
-    sys.stderr.flush()
-    return target_file
-
-
-if __name__ == '__main__':
-    p = subprocess.Popen([compile(sys.argv), "exit"],
-        stdout=subprocess.PIPE,
-        stderr=subprocess.PIPE)
-    (o, e) = p.communicate()
-    if isinstance(o, bytes) and not isinstance(o, str):
-        o = o.decode('utf-8')
-    if isinstance(e, bytes) and not isinstance(e, str):
-        e = e.decode('utf-8')
-    if o:
-        sys.stdout.write(o)
-        sys.stdout.flush()
-
-    if e:
-        sys.stderr.write(e)
-        sys.stderr.flush()
-
diff --git a/settings.gradle b/settings.gradle
index 4d36df7..35888bc 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -17,10 +17,11 @@
 
 include 'tests'
 
-include 'core:pythonAction'
+include 'core:python2ActionLoop'
+include 'core:python3Action'
+include 'core:python3ActionLoop'
 include 'core:python3AiAction'
-include 'core:pythonActionLoop'
-
+include 'core:python3AiActionLoop'
 
 rootProject.name = 'runtime-python'
 
diff --git a/settings.gradle 
b/tests/src/test/scala/runtime/actionContainers/Python2ActionLoopContainerTests.scala
similarity index 59%
copy from settings.gradle
copy to 
tests/src/test/scala/runtime/actionContainers/Python2ActionLoopContainerTests.scala
index 4d36df7..23dae23 100644
--- a/settings.gradle
+++ 
b/tests/src/test/scala/runtime/actionContainers/Python2ActionLoopContainerTests.scala
@@ -15,25 +15,22 @@
  * limitations under the License.
  */
 
-include 'tests'
+package runtime.actionContainers
 
-include 'core:pythonAction'
-include 'core:python3AiAction'
-include 'core:pythonActionLoop'
+import common.WskActorSystem
+import org.junit.runner.RunWith
+import org.scalatest.junit.JUnitRunner
 
+@RunWith(classOf[JUnitRunner])
+class Python2ActionLoopContainerTests
+    extends PythonActionContainerTests
+    with PythonActionLoopExtraTests
+    with WskActorSystem {
 
-rootProject.name = 'runtime-python'
+  override lazy val imageName = "actionloop-python-v2.7"
 
-gradle.ext.openwhisk = [
-        version: '1.0.0-SNAPSHOT'
-]
+  override val testNoSource = TestConfig("", hasCodeStub = false)
 
-gradle.ext.scala = [
-    version: '2.12.7',
-    compileFlags: ['-feature', '-unchecked', '-deprecation', 
'-Xfatal-warnings', '-Ywarn-unused-import']
-]
-
-gradle.ext.scalafmt = [
-    version: '1.5.0',
-    config: new File(rootProject.projectDir, '.scalafmt.conf')
-]
+  /** actionloop based image does not log init errors - return the error in 
the body */
+  override lazy val errorCodeOnRun = false
+}
diff --git 
a/tests/src/test/scala/runtime/actionContainers/Python3AiActionLoopContainerTests.scala
 
b/tests/src/test/scala/runtime/actionContainers/Python3AiActionLoopContainerTests.scala
new file mode 100644
index 0000000..8f40869
--- /dev/null
+++ 
b/tests/src/test/scala/runtime/actionContainers/Python3AiActionLoopContainerTests.scala
@@ -0,0 +1,155 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package runtime.actionContainers
+
+import org.junit.runner.RunWith
+import org.scalatest.junit.JUnitRunner
+import common.WskActorSystem
+import spray.json._
+import DefaultJsonProtocol._
+
+@RunWith(classOf[JUnitRunner])
+class Python3AiActionLoopContainerTests
+    extends PythonActionContainerTests
+    with PythonActionLoopExtraTests
+    with WskActorSystem {
+
+  override lazy val imageName = "actionloop-python-v3.6-ai"
+
+  override lazy val errorCodeOnRun = false
+
+  override val testNoSource = TestConfig("", hasCodeStub = false)
+
+  it should "run tensorflow" in {
+    val (out, err) = withActionContainer() { c =>
+      val code =
+        """
+          |import tensorflow as tf
+          |def main(args):
+          |   # Initialize two constants
+          |   x1 = tf.constant([1,2,3,4])
+          |   x2 = tf.constant([5,6,7,8])
+          |
+          |   # Multiply
+          |   result = tf.multiply(x1, x2)
+          |
+          |   # Initialize Session and run `result`
+          |   with tf.Session() as sess:
+          |       output = sess.run(result)
+          |       print(output)
+          |       return { "response": output.tolist() }
+        """.stripMargin
+
+      val (initCode, res) = c.init(initPayload(code))
+      initCode should be(200)
+
+      val (runCode, runRes) = c.run(runPayload(JsObject()))
+      runCode should be(200)
+
+      runRes shouldBe defined
+      runRes should be(Some(JsObject("response" -> List(5, 12, 21, 
32).toJson)))
+    }
+  }
+
+  it should "run pytorch" in {
+    val (out, err) = withActionContainer() { c =>
+      val code =
+        """
+          |import torch
+          |import torchvision
+          |import torch.nn as nn
+          |import numpy as np
+          |import torchvision.transforms as transforms
+          |def main(args):
+          |   # Create a numpy array.
+          |   x = np.array([1,2,3,4])
+          |
+          |   # Convert the numpy array to a torch tensor.
+          |   y = torch.from_numpy(x)
+          |
+          |   # Convert the torch tensor to a numpy array.
+          |   z = y.numpy()
+          |   return { "response": z.tolist()}
+        """.stripMargin
+
+      val (initCode, res) = c.init(initPayload(code))
+      initCode should be(200)
+
+      val (runCode, runRes) = c.run(runPayload(JsObject()))
+      runCode should be(200)
+
+      runRes shouldBe defined
+      runRes should be(Some(JsObject("response" -> List(1, 2, 3, 4).toJson)))
+    }
+  }
+
+  it should "support numpy" in {
+    val (out, err) = withActionContainer() { c =>
+      val code =
+        """
+          |import numpy as np
+          |def main(args):
+          |   a = np.arange(15).reshape(3, 5).tolist()
+          |   return { "array": a }
+        """.stripMargin
+
+      // action loop detects those errors at init time
+      val (initCode, initRes) = c.init(initPayload(code))
+      initCode should be(200)
+      println(initCode, initRes)
+
+      val (runCode, runRes) = c.run(runPayload(JsObject()))
+      runCode should be(200)
+      runRes.get.fields.get("array") should not be empty
+    }
+    println(out)
+    println(err)
+    checkStreams(out, err, {
+      case (o, e) =>
+        o shouldBe empty
+        e shouldBe empty
+    })
+  }
+
+  it should "detect numpy failures" in {
+    val (out, err) = withActionContainer() { c =>
+      val code =
+        """
+          |import numpy as np
+          |def main(args):
+          |   a = np.arange(15).reshape(3, 5)
+          |   return { "array": a }
+        """.stripMargin
+
+      // action loop detects those errors at init time
+      val (initCode, initRes) = c.init(initPayload(code))
+      initCode should be(200)
+      println(initCode, initRes)
+
+      val (runCode, _) = c.run(runPayload(JsObject()))
+      runCode should be(400)
+    }
+    println(out)
+    println(err)
+    checkStreams(out, err, {
+      case (o, e) =>
+        o shouldBe empty
+        e should include("Object of type 'ndarray' is not JSON serializable")
+    })
+  }
+}
diff --git 
a/tests/src/test/scala/runtime/actionContainers/PythonActionContainerTests.scala
 
b/tests/src/test/scala/runtime/actionContainers/PythonActionContainerTests.scala
index b74c974..0c11bc3 100644
--- 
a/tests/src/test/scala/runtime/actionContainers/PythonActionContainerTests.scala
+++ 
b/tests/src/test/scala/runtime/actionContainers/PythonActionContainerTests.scala
@@ -31,8 +31,8 @@ class PythonActionContainerTests extends 
BasicActionRunnerTests with WskActorSys
 
   lazy val imageName = "python3action"
 
-  /** indicates if errors are logged or returned in the answer */
-  lazy val initErrorsAreLogged = true
+  /** actionLoop does not return an error code on failed run */
+  lazy val errorCodeOnRun = true
 
   override def withActionContainer(env: Map[String, String] = Map.empty)(code: 
ActionContainer => Unit) = {
     withContainer(imageName, env)(code)
@@ -75,9 +75,10 @@ class PythonActionContainerTests extends 
BasicActionRunnerTests with WskActorSys
 
   override val testUnicode =
     TestConfig("""
+        |# encoding: utf-8
         |def main(args):
         |    sep = args['delimiter']
-        |    str = sep + " ☃ " + sep
+        |    str = sep + u" ☃ " + sep
         |    print(str)
         |    return {"winter" : str }
       """.stripMargin.trim)
@@ -180,16 +181,13 @@ class PythonActionContainerTests extends 
BasicActionRunnerTests with WskActorSys
     val (out, err) = withActionContainer() { c =>
       val (initCode, initRes) = c.init(initPayload(code, main = "echo"))
       initCode should be(502)
-      if (!initErrorsAreLogged)
-        initRes.get.fields.get("error").get.toString() should include("Zip 
file does not include")
     }
 
-    if (initErrorsAreLogged)
-      checkStreams(out, err, {
-        case (o, e) =>
-          o shouldBe empty
-          e should include("Zip file does not include")
-      })
+    checkStreams(out, err, {
+      case (o, e) =>
+        o shouldBe empty
+        e should include("Zip file does not include")
+    })
   }
 
   it should "return on action error when action fails" in {
@@ -211,7 +209,7 @@ class PythonActionContainerTests extends 
BasicActionRunnerTests with WskActorSys
        * Since it only receive a string from the application
        * it should parse the entire string  in JSON just to find it is an 
"error"
        */
-      if (initErrorsAreLogged)
+      if (errorCodeOnRun)
         runCode should be(502)
 
       runRes shouldBe defined
@@ -237,7 +235,7 @@ class PythonActionContainerTests extends 
BasicActionRunnerTests with WskActorSys
       // init checks whether compilation was successful, so return 502
       initCode should be(502)
     }
-    if (initErrorsAreLogged)
+    if (errorCodeOnRun)
       checkStreams(out, err, {
         case (o, e) =>
           o shouldBe empty
@@ -279,22 +277,15 @@ class PythonActionContainerTests extends 
BasicActionRunnerTests with WskActorSys
           |    return { "error": "not reaching here" }
         """.stripMargin
 
-      if (initErrorsAreLogged) {
-        val (initCode, res) = c.init(initPayload(code))
-        initCode should be(502)
-      } else {
-        // action loop detects those errors at init time
-        val (initCode, initRes) = c.init(initPayload(code))
-        initCode should be(502)
-        initRes.get.fields.get("error").get.toString() should 
include("Traceback")
-      }
+      // action loop detects those errors at init time
+      val (initCode, _) = c.init(initPayload(code))
+      initCode should be(502)
     }
-    if (initErrorsAreLogged)
-      checkStreams(out, err, {
-        case (o, e) =>
-          o shouldBe empty
-          e should include("Traceback")
-      })
+    checkStreams(out, err, {
+      case (o, e) =>
+        o shouldBe empty
+        e should include regex ("Traceback|cannot start")
+    })
   }
 
   it should "have a valid sys.executable" in {
diff --git 
a/tests/src/test/scala/runtime/actionContainers/PythonActionLoopContainerTests.scala
 
b/tests/src/test/scala/runtime/actionContainers/PythonActionLoopContainerTests.scala
index 9a5fca9..f1e9f82 100644
--- 
a/tests/src/test/scala/runtime/actionContainers/PythonActionLoopContainerTests.scala
+++ 
b/tests/src/test/scala/runtime/actionContainers/PythonActionLoopContainerTests.scala
@@ -26,14 +26,17 @@ import org.scalatest.junit.JUnitRunner
 import spray.json._
 
 @RunWith(classOf[JUnitRunner])
-class PythonActionLoopContainerTests extends PythonActionContainerTests with 
WskActorSystem {
+class PythonActionLoopContainerTests
+    extends PythonActionContainerTests
+    with PythonActionLoopExtraTests
+    with WskActorSystem {
 
   override lazy val imageName = "actionloop-python-v3.7"
 
   override val testNoSource = TestConfig("", hasCodeStub = false)
 
   /** actionloop based image does not log init errors - return the error in 
the body */
-  override lazy val initErrorsAreLogged = false
+  override lazy val errorCodeOnRun = false
 
   def testArtifact(name: String): File = {
     new File(this.getClass.getClassLoader.getResource(name).toURI)
@@ -75,11 +78,11 @@ class PythonActionLoopContainerTests extends 
PythonActionContainerTests with Wsk
       val (initCode, initRes) = c.init(initPayload(code, main = "main"))
       initCode should be(502)
 
-      if (!initErrorsAreLogged)
-        initRes.get.fields.get("error").get.toString should include("Zip file 
does not include mandatory files")
+      if (!errorCodeOnRun)
+        initRes.get.fields.get("error").get.toString should include regex ("No 
module|action failed")
     }
 
-    if (initErrorsAreLogged)
+    if (errorCodeOnRun)
       checkStreams(out, err, {
         case (o, e) =>
           o shouldBe empty
@@ -95,11 +98,11 @@ class PythonActionLoopContainerTests extends 
PythonActionContainerTests with Wsk
       val (initCode, initRes) = c.init(initPayload(code, main = "main"))
       initCode should be(502)
 
-      if (!initErrorsAreLogged)
-        initRes.get.fields.get("error").get.toString should include("Invalid 
virtualenv. Zip file does not include")
+      if (!errorCodeOnRun)
+        initRes.get.fields.get("error").get.toString should include regex 
("Invalid virtualenv|action failed")
     }
 
-    if (initErrorsAreLogged)
+    if (errorCodeOnRun)
       checkStreams(out, err, {
         case (o, e) =>
           o shouldBe empty
diff --git 
a/tests/src/test/scala/runtime/actionContainers/PythonActionLoopExtraTests.scala
 
b/tests/src/test/scala/runtime/actionContainers/PythonActionLoopExtraTests.scala
new file mode 100644
index 0000000..04a8784
--- /dev/null
+++ 
b/tests/src/test/scala/runtime/actionContainers/PythonActionLoopExtraTests.scala
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package runtime.actionContainers
+
+import spray.json.{JsObject, JsString}
+
+trait PythonActionLoopExtraTests {
+  this: PythonActionContainerTests =>
+
+  it should "detect termination at run" in {
+    val (out, err) = withActionContainer() { c =>
+      val code =
+        """
+          |import sys
+          |def main(args):
+          |  sys.exit(1)
+        """.stripMargin
+
+      // action loop detects those errors at init time
+      val (initCode, _) = c.init(initPayload(code))
+      initCode should be(200)
+
+      val (runCode, runRes) = c.run(runPayload(JsObject()))
+      runCode should be(400)
+      println(runCode, runRes)
+      runRes.get.fields.get("error").get.toString() should include("command 
exited")
+    }
+    checkStreams(out, err, {
+      case (o, e) =>
+        o shouldBe empty
+        e shouldBe empty
+    })
+  }
+
+  it should "detect termination at init" in {
+    val (out, err) = withActionContainer() { c =>
+      val code =
+        """
+          |import sys
+          |sys.exit(1)
+          |def main(args):
+          |   pass
+        """.stripMargin
+
+      // action loop detects those errors at init time
+      val (initCode, initRes) = c.init(initPayload(code))
+      initCode should be(502)
+      initRes.get.fields.get("error").get.toString() should include("Cannot 
start action")
+    }
+    checkStreams(out, err, {
+      case (o, e) =>
+        o shouldBe empty
+        e should include("command exited before ack")
+    })
+  }
+
+  it should "read an environment variable" in {
+    val (out, err) = withActionContainer() { c =>
+      val code = """
+                   |import os
+                   |X = os.getenv('X')
+                   |print(X)
+                   |def main(args):
+                   |   return { "body": "ok" }
+                 """.stripMargin
+
+      // action loop detects those errors at init time
+      val (initCode, _) = c.init(initPayload(code, "main", Some(Map("X" -> 
JsString("xyz")))))
+      initCode should be(200)
+
+      val (runCode, runRes) = c.run(runPayload(JsObject()))
+      runCode should be(200)
+      runRes.get.fields.get("body").get shouldBe JsString("ok")
+    }
+    checkStreams(out, err, {
+      case (o, e) =>
+        o should include("xyz")
+        e shouldBe empty
+    })
+  }
+}

Reply via email to