This is an automated email from the ASF dual-hosted git repository.

mrutkowski pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/openwhisk-wskdeploy.git


The following commit(s) were added to refs/heads/master by this push:
     new d653592  chore: fix spelling (#1127)
d653592 is described below

commit d653592d8b8e8b7e64f228049bfa2f74db07a1e1
Author: John Bampton <[email protected]>
AuthorDate: Sat Mar 27 00:39:09 2021 +1000

    chore: fix spelling (#1127)
    
    * chore: fix spelling
    
    * Regen. i18n_resources.go
    
    Co-authored-by: Matt Rutkowski <[email protected]>
---
 deployers/deploymentreader_test.go                 |   4 +-
 deployers/whiskclient_test.go                      |   2 +-
 docs/sync_projects_between_client_and_server.md    |   2 +-
 docs/wskdeploy_action_env_var_parms.md             |   2 +-
 docs/wskdeploy_apigateway_http.md                  |   4 +-
 docs/wskdeploy_apigateway_http_sequence.md         |   2 +-
 docs/wskdeploy_apigateway_open_api_spec.md         |   2 +-
 docs/wskdeploy_faq.md                              |   2 +-
 docs/wskdeploy_sequence_basic.md                   |   4 +-
 parsers/manifest_parser.go                         |   2 +-
 specification/html/spec_packages.md                |   2 +-
 specification/html/spec_rule.md                    |   2 +-
 specification/html/spec_trigger.md                 |   2 +-
 .../actions/find-delayed-pull-requests.js          |   4 +-
 .../actions/track-pull-requests.js                 |   2 +-
 utils/file.go                                      |   2 +-
 utils/misc_test.go                                 |   2 +-
 utils/validation.go                                |   2 +-
 utils/zip.go                                       |  24 +-
 wskenv/environment_test.go                         |   2 +-
 wski18n/i18n_resources.go                          | 397 +++++----------------
 wski18n/resources/en_US.all.json                   |  12 +-
 22 files changed, 124 insertions(+), 355 deletions(-)

diff --git a/deployers/deploymentreader_test.go 
b/deployers/deploymentreader_test.go
index b2c1f2a..3a44bfa 100644
--- a/deployers/deploymentreader_test.go
+++ b/deployers/deploymentreader_test.go
@@ -95,7 +95,7 @@ func testLoadAndBindDeploymentYAML(t *testing.T, path string, 
triggerName string
        dReader := NewDeploymentReader(sDeployer)
        err := dReader.HandleYaml()
 
-       // DEBUG() Uncomment to display initial DeploymentDescriptor (manifest, 
deployemnt befopre binding)
+       // DEBUG() Uncomment to display initial DeploymentDescriptor (manifest, 
deployment before binding)
        //fmt.Println(utils.ConvertMapToJSONString("BEFORE: 
dReader.DeploymentDescriptor", dReader.DeploymentDescriptor))
        //fmt.Println(utils.ConvertMapToJSONString("BEFORE: 
sDeployer.Deployment", sDeployer.Deployment))
 
@@ -131,7 +131,7 @@ func TestDeploymentReader_ProjectBindTrigger(t *testing.T) {
        // Create an annotation (in manifest representation) with key we 
expect, with value that should be overwritten
        TEST_ANNOTATION := whisk.KeyValue{TEST_ANNOTATION_KEY, "foo"}
 
-       // create ServicedEployer
+       // create ServiceDeployer
        sDeployer, dReader := testLoadAndBindDeploymentYAML(t, TEST_DATA, 
TEST_TRIGGER, TEST_ANNOTATION)
 
        // test Project exists with expected name in Deployment file
diff --git a/deployers/whiskclient_test.go b/deployers/whiskclient_test.go
index 3c636c5..acbc29f 100644
--- a/deployers/whiskclient_test.go
+++ b/deployers/whiskclient_test.go
@@ -282,5 +282,5 @@ func TestNewWhiskConfigWithAdditionalHeaders(t *testing.T) {
        newHeaderValue := "NewValue"
        AddAdditionalHeader(newHeader, newHeaderValue)
        config, _ := NewWhiskConfig(propPath, deploymentPath, manifestPath)
-       assert.Equal(t, newHeaderValue, 
config.AdditionalHeaders.Get(newHeader), "Failed to set an addtional header")
+       assert.Equal(t, newHeaderValue, 
config.AdditionalHeaders.Get(newHeader), "Failed to set an additional header")
 }
diff --git a/docs/sync_projects_between_client_and_server.md 
b/docs/sync_projects_between_client_and_server.md
index 645263b..4576d15 100644
--- a/docs/sync_projects_between_client_and_server.md
+++ b/docs/sync_projects_between_client_and_server.md
@@ -33,7 +33,7 @@ whisk-managed:
 
 > Where the text “OpenWhisk” is a constant prefix and “\0” is the NULL 
 > character. The <size_of_manifest_file> and <contents_of_manifest_file> vary 
 > depending on the file.
 
-Now, subsequent deployments of the same project in `sync` mode, calculates a 
new `projectHash` on client and compares it with the one on the server for 
every entity in that project. This comparision could lead us to following two 
scenarios:
+Now, subsequent deployments of the same project in `sync` mode, calculates a 
new `projectHash` on client and compares it with the one on the server for 
every entity in that project. This comparison could lead us to following two 
scenarios:
 
 * **Scenario 1:** If `projectHash` on client is same as `projectHash` on the 
server i.e. there were no changes in the project on the client side, the 
project on server side is left as is except wskdeploy redeploys all the 
entities from manifest file to capture any changes in deployment file.
 
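A minimal sketch of how a projectHash along these lines could be computed from the annotation format quoted above, assuming a SHA-1 digest over the "OpenWhisk\0<size>\0<contents>" string purely for illustration (the digest and helper name are assumptions, not necessarily what wskdeploy uses internally):

```go
// Hypothetical sketch only: hashes the "OpenWhisk\0<size>\0<contents>" string
// described above. The SHA-1 digest is an assumption made for illustration.
package main

import (
	"crypto/sha1"
	"fmt"
	"os"
)

func projectHash(manifestPath string) (string, error) {
	contents, err := os.ReadFile(manifestPath)
	if err != nil {
		return "", err
	}
	// constant "OpenWhisk" prefix, NULL separators, manifest size, then contents
	input := fmt.Sprintf("OpenWhisk\x00%d\x00%s", len(contents), contents)
	return fmt.Sprintf("%x", sha1.Sum([]byte(input))), nil
}

func main() {
	h, err := projectHash("manifest.yaml") // placeholder path
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return
	}
	fmt.Println(h)
}
```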
diff --git a/docs/wskdeploy_action_env_var_parms.md 
b/docs/wskdeploy_action_env_var_parms.md
index 86782b9..c4dcf40 100644
--- a/docs/wskdeploy_action_env_var_parms.md
+++ b/docs/wskdeploy_action_env_var_parms.md
@@ -86,7 +86,7 @@ In this example:
 - we further demonstrated how string values from environment variables could 
be concatenated with other strings within a Manifest file
 
 ### Notes:
-- These methods for binidng environment variables to input parameters are also 
available within Deployment files.
+- These methods for binding environment variables to input parameters are also 
available within Deployment files.
 
 ### Source code
 The manifest file for this example can be found here:
diff --git a/docs/wskdeploy_apigateway_http.md 
b/docs/wskdeploy_apigateway_http.md
index 1a7c3e0..33267d7 100644
--- a/docs/wskdeploy_apigateway_http.md
+++ b/docs/wskdeploy_apigateway_http.md
@@ -87,9 +87,9 @@ Because a HTTP response disables the API Gateway default 
handling, you have to p
 - a `body` field that contains your normal payload,
 - an optional `headers` field that includes any HTTP header you want to set, 
typically `Content-Type`.
 
-If you don't provide this structure, the API Gateway will generate a HTTP 
response with status code `204: No Content` and an empty body. If this occurs 
when it shouldn't, it's probably a sign that you have a HTTP response specified 
with the gateway but the undelying action doesn't return this structure.
+If you don't provide this structure, the API Gateway will generate a HTTP 
response with status code `204: No Content` and an empty body. If this occurs 
when it shouldn't, it's probably a sign that you have a HTTP response specified 
with the gateway but the underlying action doesn't return this structure.
 
-When you want to return an error, you need to provide the same structure 
wrapped into an `error` object. If you don't wrap it into an `error` object, it 
will still work from an HTTP prespective but OpenWhisk will not recognise it as 
an error.
+When you want to return an error, you need to provide the same structure 
wrapped into an `error` object. If you don't wrap it into an `error` object, it 
will still work from an HTTP perspective but OpenWhisk will not recognise it as 
an error.
 
 This structure will work with any language that is supported by OpenWhisk, 
such as python or Java. If you are using JavaScript, you can make use of 
`Promise.resolve` and `Promise.reject` to make your code more readable by 
removing the need for the `error` wrapper:
 
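For the HTTP-response structure discussed above, a minimal sketch of an action written for the OpenWhisk Go runtime that returns the `body`/`headers` shape described (the payload values are placeholders; an error case would wrap the same map under an `error` key, as the text explains):

```go
// Sketch of an action returning the structure described above; values are
// placeholders. For an error, wrap the same map under an "error" key.
package main

func Main(params map[string]interface{}) map[string]interface{} {
	return map[string]interface{}{
		"headers": map[string]interface{}{
			"Content-Type": "application/json",
		},
		"body": map[string]interface{}{
			"greeting": "hello",
		},
	}
}
```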
diff --git a/docs/wskdeploy_apigateway_http_sequence.md 
b/docs/wskdeploy_apigateway_http_sequence.md
index ae8ea1c..c861745 100644
--- a/docs/wskdeploy_apigateway_http_sequence.md
+++ b/docs/wskdeploy_apigateway_http_sequence.md
@@ -144,7 +144,7 @@ You should then see a JSON response with status code `200` 
and the following res
 
 ### Discussion
 
-By combining HTTP responses and sequences, you can re-use existing actions 
that are not designed to return HTTP responses by adding the necesary wrapper 
to the final result. You need to be careful how errors are handled as they will 
short-circuit the sequence execution and return early.
+By combining HTTP responses and sequences, you can re-use existing actions 
that are not designed to return HTTP responses by adding the necessary wrapper 
to the final result. You need to be careful how errors are handled as they will 
short-circuit the sequence execution and return early.
 
 ### Source code
 The source code for the manifest and JavaScript files can be found here:
diff --git a/docs/wskdeploy_apigateway_open_api_spec.md 
b/docs/wskdeploy_apigateway_open_api_spec.md
index b7f8341..97d0ffb 100644
--- a/docs/wskdeploy_apigateway_open_api_spec.md
+++ b/docs/wskdeploy_apigateway_open_api_spec.md
@@ -112,7 +112,7 @@ There are two major differences from _"Hello World" API_ 
example:
 - the root key is now project as the open api specification is a project wide 
concept.
 - a new `config` key specifying where the Open API Specification is located.
 
-The `config` key under `project` in the manifest file specifies where the Open 
API Specification is located. The keyword `config` was chosen to remain 
consistent with the `config-file` terminology in OpenWhisk CLI flag option. The 
Open API Specification describes in a JSON document the the base path, 
endpoint, HTTP verb, and other details describing the API. For example, the 
document above describes a GET endpoint at `/hello/world` that recieves JSON as 
input and returns JSON as output.
+The `config` key under `project` in the manifest file specifies where the Open 
API Specification is located. The keyword `config` was chosen to remain 
consistent with the `config-file` terminology in OpenWhisk CLI flag option. The 
Open API Specification describes in a JSON document the the base path, 
endpoint, HTTP verb, and other details describing the API. For example, the 
document above describes a GET endpoint at `/hello/world` that receives JSON as 
input and returns JSON as output.
 
 ### Deploying
 
diff --git a/docs/wskdeploy_faq.md b/docs/wskdeploy_faq.md
index ef7017d..84e7b0d 100644
--- a/docs/wskdeploy_faq.md
+++ b/docs/wskdeploy_faq.md
@@ -30,7 +30,7 @@
 
 ### What is the order of precedence for OpenWhisk credentials?
 
-- The ```wskdeploy``` utility finds the credentials (apihost, namespace, and 
auth) as well as the APIGW_ACCESS_TOKEN in the folowing precedence from highest 
to lowest:
+- The ```wskdeploy``` utility finds the credentials (apihost, namespace, and 
auth) as well as the APIGW_ACCESS_TOKEN in the following precedence from 
highest to lowest:
   - ```wskdeploy``` command line (i.e. ```wskdeploy --apihost --namespace 
--auth```)
   - The deployment file
   - The manifest file
diff --git a/docs/wskdeploy_sequence_basic.md b/docs/wskdeploy_sequence_basic.md
index 8d40c1d..84268b9 100644
--- a/docs/wskdeploy_sequence_basic.md
+++ b/docs/wskdeploy_sequence_basic.md
@@ -21,7 +21,7 @@
 
 ## Creating a basic Action sequence
 
-OpenWhisk supports creating a new, named composite action from sequencung 
multiple, compatible Actions.
+OpenWhisk supports creating a new, named composite action from sequencing 
multiple, compatible Actions.
 
 This example:
 - Shows how to use a Manifest to sequence three actions together to:
@@ -180,7 +180,7 @@ params: {
     "job": "gentleman"
  }
 ```
-the input paramaters are augmented by the first Action in the sequence to 
produce the output "member" object:
+the input parameters are augmented by the first Action in the sequence to 
produce the output "member" object:
 
 ```json
 member: {
diff --git a/parsers/manifest_parser.go b/parsers/manifest_parser.go
index bf5ad9f..7251559 100644
--- a/parsers/manifest_parser.go
+++ b/parsers/manifest_parser.go
@@ -660,7 +660,7 @@ func (dm *YAMLParser) readActionFunction(manifestFilePath 
string, manifestFileNa
 
        if utils.IsDirectory(actionFilePath) {
                zipFileName = actionFilePath + "." + runtimes.ZIP_FILE_EXTENSION
-               err := utils.NewZipWritter(actionFilePath, zipFileName, 
action.Include, action.Exclude, filepath.Dir(manifestFilePath)).Zip()
+               err := utils.NewZipWriter(actionFilePath, zipFileName, 
action.Include, action.Exclude, filepath.Dir(manifestFilePath)).Zip()
                if err != nil {
                        return actionFilePath, nil, err
                }
diff --git a/specification/html/spec_packages.md 
b/specification/html/spec_packages.md
index 5bc3297..9316510 100644
--- a/specification/html/spec_packages.md
+++ b/specification/html/spec_packages.md
@@ -158,7 +158,7 @@ my_whisk_package:
   version: 1.2.0
   license: Apache-2.0
   actions:
-    my_awsome_action:
+    my_awesome_action:
       <Action schema>
   triggers:
     trigger_for_awesome_action:
diff --git a/specification/html/spec_rule.md b/specification/html/spec_rule.md
index 1fa2137..22894f7 100644
--- a/specification/html/spec_rule.md
+++ b/specification/html/spec_rule.md
@@ -98,7 +98,7 @@ The Rule entity schema contains the information necessary to 
associates one trig
 
 ### Requirements
 - The Rule name (i.e., <ruleName>) MUST be less than or equal to 256 
characters.
-- The Rule entity schema includes all general [Entity Schem](#TBD) fields in 
addition to any fields
+- The Rule entity schema includes all general [Entity Schema](#TBD) fields in 
addition to any fields
 declared above.
 
 ### Notes
diff --git a/specification/html/spec_trigger.md 
b/specification/html/spec_trigger.md
index 3b51992..6a21186 100644
--- a/specification/html/spec_trigger.md
+++ b/specification/html/spec_trigger.md
@@ -73,7 +73,7 @@ The Trigger entity schema contains the necessary information 
to describe the str
   <p>N/A</p>
   </td>
   <td>
-  <p>The optional credential used to acces the feed service.</p>
+  <p>The optional credential used to access the feed service.</p>
   </td>
  </tr>
  <tr>
diff --git 
a/tests/apps/openwhisk-githubslackbot/actions/find-delayed-pull-requests.js 
b/tests/apps/openwhisk-githubslackbot/actions/find-delayed-pull-requests.js
index bee7c95..3b2d0c4 100644
--- a/tests/apps/openwhisk-githubslackbot/actions/find-delayed-pull-requests.js
+++ b/tests/apps/openwhisk-githubslackbot/actions/find-delayed-pull-requests.js
@@ -185,7 +185,7 @@ function stopTracking(pullRequest, ifInState) {
                     })
                     .then (function () {
                         return {
-                            message: "Sucessfully stopped tracking " + id
+                            message: "Successfully stopped tracking " + id
                         };
                     });
                 } else {
@@ -240,7 +240,7 @@ function prIsTooOld(prDoc) {
     var moment = require("moment");
     // read lastUpdate from github
     var readyMoment = moment(prDoc.lastUpdate);
-    // depeneding on the state of pull request, "READY" or "REVIEW"
+    // depending on the state of pull request, "READY" or "REVIEW"
     // read the limit amount and days
     var limit = limits[prDoc.state];
     // moment.diff() returns difference between today and
diff --git a/tests/apps/openwhisk-githubslackbot/actions/track-pull-requests.js 
b/tests/apps/openwhisk-githubslackbot/actions/track-pull-requests.js
index 108c67d..bfd7613 100644
--- a/tests/apps/openwhisk-githubslackbot/actions/track-pull-requests.js
+++ b/tests/apps/openwhisk-githubslackbot/actions/track-pull-requests.js
@@ -231,7 +231,7 @@ function stopTracking(pullRequest, ifInState) {
                     })
                     .then (function () {
                         return {
-                            message: "Sucessfully stopped tracking " + id
+                            message: "Successfully stopped tracking " + id
                         };
                     });
                 } else {
diff --git a/utils/file.go b/utils/file.go
index e8470f4..03f5c43 100644
--- a/utils/file.go
+++ b/utils/file.go
@@ -49,7 +49,7 @@ func isFile(path string) (bool, error) {
                        return true, nil
                }
        }
-       // stat returned an error and here we are chekcking if it was 
os.PathError
+       // stat returned an error and here we are checking if it was 
os.PathError
        if !os.IsNotExist(err) {
                return false, nil
        }
diff --git a/utils/misc_test.go b/utils/misc_test.go
index 8735302..a5d0bdf 100644
--- a/utils/misc_test.go
+++ b/utils/misc_test.go
@@ -61,7 +61,7 @@ func TestDependencies(t *testing.T) {
 func TestNewZipWriter(t *testing.T) {
        filePath := "../tests/src/integration/zipaction/actions/cat"
        zipName := filePath + ".zip"
-       err := NewZipWritter(filePath, zipName, make([][]string, 0), 
make([]string, 0), "").Zip()
+       err := NewZipWriter(filePath, zipName, make([][]string, 0), 
make([]string, 0), "").Zip()
        defer os.Remove(zipName)
        assert.Equal(t, nil, err, "zip folder error happened.")
 }
diff --git a/utils/validation.go b/utils/validation.go
index 45e196b..8fbf5e3 100644
--- a/utils/validation.go
+++ b/utils/validation.go
@@ -62,7 +62,7 @@ var license_json = LicenseJSON{}
 //Check local data record at first
 //Then check remote json data
 func CheckLicense(license string) bool {
-       // TODO(#673) Strict flag should cause an error to be generatd
+       // TODO(#673) Strict flag should cause an error to be generated
        if !LicenseLocalValidation(license) && 
!LicenseRemoteValidation(license) {
                warningString := wski18n.T(
                        wski18n.ID_WARN_KEYVALUE_INVALID,
diff --git a/utils/zip.go b/utils/zip.go
index 72a1e2e..0aeb0f2 100644
--- a/utils/zip.go
+++ b/utils/zip.go
@@ -31,8 +31,8 @@ import (
 const PATH_WILDCARD = "*"
 const ONE_DIR_UP = "../"
 
-func NewZipWritter(src string, des string, include [][]string, exclude 
[]string, manifestFilePath string) *ZipWritter {
-       zw := &ZipWritter{
+func NewZipWriter(src string, des string, include [][]string, exclude 
[]string, manifestFilePath string) *ZipWriter {
+       zw := &ZipWriter{
                src:              src,
                des:              des,
                include:          include,
@@ -43,14 +43,14 @@ func NewZipWritter(src string, des string, include 
[][]string, exclude []string,
        return zw
 }
 
-type ZipWritter struct {
+type ZipWriter struct {
        src              string
        des              string
        include          [][]string
        exclude          []string
        excludedFiles    map[string]bool
        manifestFilePath string
-       zipWritter       *zip.Writer
+       zipWriter       *zip.Writer
 }
 
 type Include struct {
@@ -58,7 +58,7 @@ type Include struct {
        destination string
 }
 
-func (zw *ZipWritter) zipFile(path string, f os.FileInfo, err error) error {
+func (zw *ZipWriter) zipFile(path string, f os.FileInfo, err error) error {
        var file *os.File
        var wr io.Writer
        var verboseMsg string
@@ -85,7 +85,7 @@ func (zw *ZipWritter) zipFile(path string, f os.FileInfo, err 
error) error {
        defer file.Close()
 
        fileName := strings.TrimPrefix(path, zw.src+"/")
-       if wr, err = zw.zipWritter.Create(fileName); err != nil {
+       if wr, err = zw.zipWriter.Create(fileName); err != nil {
                return err
        }
 
@@ -100,7 +100,7 @@ func (zw *ZipWritter) zipFile(path string, f os.FileInfo, 
err error) error {
        return nil
 }
 
-func (zw *ZipWritter) buildIncludeMetadata() ([]Include, error) {
+func (zw *ZipWriter) buildIncludeMetadata() ([]Include, error) {
        var includeInfo []Include
        var listOfSourceFiles []string
        var err error
@@ -220,7 +220,7 @@ func (zw *ZipWritter) buildIncludeMetadata() ([]Include, 
error) {
        return includeInfo, nil
 }
 
-func (zw *ZipWritter) buildExcludeMetadata() error {
+func (zw *ZipWriter) buildExcludeMetadata() error {
        var err error
        for _, exclude := range zw.exclude {
                exclude = filepath.Join(zw.manifestFilePath, exclude)
@@ -231,7 +231,7 @@ func (zw *ZipWritter) buildExcludeMetadata() error {
        return err
 }
 
-func (zw *ZipWritter) findExcludedIncludedFiles(functionPath string, flag 
bool) error {
+func (zw *ZipWriter) findExcludedIncludedFiles(functionPath string, flag bool) 
error {
        var err error
        var files []string
        var excludedFiles []string
@@ -266,7 +266,7 @@ func (zw *ZipWritter) 
findExcludedIncludedFiles(functionPath string, flag bool)
        return err
 }
 
-func (zw *ZipWritter) Zip() error {
+func (zw *ZipWriter) Zip() error {
 
        var zipFile *os.File
        var err error
@@ -286,7 +286,7 @@ func (zw *ZipWritter) Zip() error {
        wskprint.PrintlnOpenWhiskVerbose(Flags.Verbose, verboseMsg)
 
        // creating a new zip writter for greeting.zip
-       zw.zipWritter = zip.NewWriter(zipFile)
+       zw.zipWriter = zip.NewWriter(zipFile)
 
        // build a map of file names and bool indicating whether the file is 
included or excluded
        // iterate over the directory specified in "function", find the list of 
files and mark them as not excluded
@@ -342,7 +342,7 @@ func (zw *ZipWritter) Zip() error {
 
        // now close the zip file greeting.zip as all the included items
        // are added into the zip file along with the action root dir
-       if err = zw.zipWritter.Close(); err != nil {
+       if err = zw.zipWriter.Close(); err != nil {
                return err
        }
 
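Since most of this commit is the ZipWritter -> ZipWriter rename, a small sketch of driving the renamed constructor, mirroring the call sites in parsers/manifest_parser.go and utils/misc_test.go above (the module import path is assumed):

```go
// Sketch mirroring the call sites touched by this commit; import path assumed.
package main

import (
	"fmt"

	"github.com/apache/openwhisk-wskdeploy/utils"
)

func main() {
	actionDir := "../tests/src/integration/zipaction/actions/cat" // as in misc_test.go
	zipName := actionDir + ".zip"

	// empty include/exclude lists and manifest path, as in TestNewZipWriter above
	err := utils.NewZipWriter(actionDir, zipName,
		make([][]string, 0), make([]string, 0), "").Zip()
	if err != nil {
		fmt.Println("zip failed:", err)
	}
}
```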
diff --git a/wskenv/environment_test.go b/wskenv/environment_test.go
index 6a069ed..77d80a5 100644
--- a/wskenv/environment_test.go
+++ b/wskenv/environment_test.go
@@ -36,7 +36,7 @@ func TestInterpolateStringWithEnvVar(t *testing.T) {
        assert.Equal(t, "NoDollar", InterpolateStringWithEnvVar("NoDollar"), 
"NoDollar should be no change.")
        assert.Equal(t, "oh, dollars!", 
InterpolateStringWithEnvVar("$WithDollar"), "dollar sign should be handled.")
        assert.Equal(t, "5000", InterpolateStringWithEnvVar("5000"), "Should be 
no difference between integer and string.")
-       assert.Equal(t, "", InterpolateStringWithEnvVar("$WithDollarAgain"), 
"if not found in environemnt, return empty string.")
+       assert.Equal(t, "", InterpolateStringWithEnvVar("$WithDollarAgain"), 
"if not found in environment, return empty string.")
        assert.Equal(t, "oh, dollars!.ccc.aaa", 
InterpolateStringWithEnvVar("${WithDollar}.ccc.aaa"), "String concatenation 
fail")
        assert.Equal(t, "ddd.NO dollar.aaa", 
InterpolateStringWithEnvVar("ddd.${NoDollar}.aaa"), "String concatenation fail")
        assert.Equal(t, "oh, dollars!.NO dollar.aaa", 
InterpolateStringWithEnvVar("${WithDollar}.${NoDollar}.aaa"), "String 
concatenation fail")
diff --git a/wski18n/i18n_resources.go b/wski18n/i18n_resources.go
index e7d4e67..f85daf8 100644
--- a/wski18n/i18n_resources.go
+++ b/wski18n/i18n_resources.go
@@ -1,17 +1,3 @@
-// Code generated by go-bindata.
-// sources:
-// wski18n/resources/de_DE.all.json
-// wski18n/resources/en_US.all.json
-// wski18n/resources/es_ES.all.json
-// wski18n/resources/fr_FR.all.json
-// wski18n/resources/it_IT.all.json
-// wski18n/resources/ja_JA.all.json
-// wski18n/resources/ko_KR.all.json
-// wski18n/resources/pt_BR.all.json
-// wski18n/resources/zh_Hans.all.json
-// wski18n/resources/zh_Hant.all.json
-// DO NOT EDIT!
-
 package wski18n
 
 import (
@@ -19,14 +5,10 @@ import (
        "compress/gzip"
        "fmt"
        "io"
-       "io/ioutil"
-       "os"
-       "path/filepath"
        "strings"
-       "time"
 )
 
-func bindataRead(data []byte, name string) ([]byte, error) {
+func bindata_read(data []byte, name string) ([]byte, error) {
        gz, err := gzip.NewReader(bytes.NewBuffer(data))
        if err != nil {
                return nil, fmt.Errorf("Read %q: %v", name, err)
@@ -34,290 +16,116 @@ func bindataRead(data []byte, name string) ([]byte, 
error) {
 
        var buf bytes.Buffer
        _, err = io.Copy(&buf, gz)
-       clErr := gz.Close()
+       gz.Close()
 
        if err != nil {
                return nil, fmt.Errorf("Read %q: %v", name, err)
        }
-       if clErr != nil {
-               return nil, err
-       }
 
        return buf.Bytes(), nil
 }
 
-type asset struct {
-       bytes []byte
-       info  os.FileInfo
-}
-
-type bindataFileInfo struct {
-       name    string
-       size    int64
-       mode    os.FileMode
-       modTime time.Time
-}
-
-func (fi bindataFileInfo) Name() string {
-       return fi.name
-}
-func (fi bindataFileInfo) Size() int64 {
-       return fi.size
-}
-func (fi bindataFileInfo) Mode() os.FileMode {
-       return fi.mode
-}
-func (fi bindataFileInfo) ModTime() time.Time {
-       return fi.modTime
-}
-func (fi bindataFileInfo) IsDir() bool {
-       return false
-}
-func (fi bindataFileInfo) Sys() interface{} {
-       return nil
-}
-
-var _wski18nResourcesDe_deAllJson = 
[]byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x01\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00")
+var _wski18n_resources_de_de_all_json = 
[]byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x01\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00")
 
-func wski18nResourcesDe_deAllJsonBytes() ([]byte, error) {
-       return bindataRead(
-               _wski18nResourcesDe_deAllJson,
+func wski18n_resources_de_de_all_json() ([]byte, error) {
+       return bindata_read(
+               _wski18n_resources_de_de_all_json,
                "wski18n/resources/de_DE.all.json",
        )
 }
 
-func wski18nResourcesDe_deAllJson() (*asset, error) {
-       bytes, err := wski18nResourcesDe_deAllJsonBytes()
-       if err != nil {
-               return nil, err
-       }
-
-       info := bindataFileInfo{name: "wski18n/resources/de_DE.all.json", size: 
0, mode: os.FileMode(420), modTime: time.Unix(1515697090, 0)}
-       a := &asset{bytes: bytes, info: info}
-       return a, nil
-}
+var _wski18n_resources_en_us_all_json = 
[]byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xcc\x7c\x7b\x8f\x1b\x37\x92\xf8\xff\xf9\x14\x85\x60\x01\x27\x80\x46\xf6\x2e\x7e\xf8\x61\x31\x77\x3e\x60\xd6\x1e\x27\xb3\xb1\x33\xbe\x79\x24\xc8\xd9\x83\x36\xd5\x5d\x92\xb8\xd3\x4d\xf6\x92\x6c\xc9\xca\x40\xdf\xfd\x50\x45\xb2\xbb\xa5\x51\x3f\x34\x76\x70\xf1\x3f\xd6\x88\x64\xbd\x58\x2c\xd6\x8b\xfa\xf0\x0d\xc0\xc3\x37\x00\x00\xdf\xca\xec\xdb\x53\xf8\xb6\xb0\x8b\xa4\x34\x38\x97\x9f\x13\x34\x46\x9b\x6f\x27
 [...]
 
-var _wski18nResourcesEn_usAllJson = 
[]byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xcc\x7c\x7b\x8f\x1b\x37\x92\xf8\xff\xf9\x14\x85\x60\x01\x27\x80\x46\xf6\x2e\x7e\xf8\x61\x31\x77\x3e\x60\xd6\x1e\x27\xb3\xb1\x33\xbe\x79\x24\xc8\xd9\x83\x36\xd5\x5d\x92\xb8\xd3\x4d\xf6\x92\x6c\xc9\xca\x40\xdf\xfd\x50\x45\xb2\xbb\xa5\x51\x3f\x34\x76\x70\xf1\x3f\xd6\x88\x64\xbd\x58\x2c\xd6\x8b\xfa\xf0\x0d\xc0\xc3\x37\x00\x00\xdf\xca\xec\xdb\x53\xf8\xb6\xb0\x8b\xa4\x34\x38\x97\x9f\x13\x34\x46\x9b\x6f\x27\x7e
 [...]
-
-func wski18nResourcesEn_usAllJsonBytes() ([]byte, error) {
-       return bindataRead(
-               _wski18nResourcesEn_usAllJson,
+func wski18n_resources_en_us_all_json() ([]byte, error) {
+       return bindata_read(
+               _wski18n_resources_en_us_all_json,
                "wski18n/resources/en_US.all.json",
        )
 }
 
-func wski18nResourcesEn_usAllJson() (*asset, error) {
-       bytes, err := wski18nResourcesEn_usAllJsonBytes()
-       if err != nil {
-               return nil, err
-       }
+var _wski18n_resources_es_es_all_json = 
[]byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x01\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00")
 
-       info := bindataFileInfo{name: "wski18n/resources/en_US.all.json", size: 
21925, mode: os.FileMode(420), modTime: time.Unix(1580408211, 0)}
-       a := &asset{bytes: bytes, info: info}
-       return a, nil
-}
-
-var _wski18nResourcesEs_esAllJson = 
[]byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x01\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00")
-
-func wski18nResourcesEs_esAllJsonBytes() ([]byte, error) {
-       return bindataRead(
-               _wski18nResourcesEs_esAllJson,
+func wski18n_resources_es_es_all_json() ([]byte, error) {
+       return bindata_read(
+               _wski18n_resources_es_es_all_json,
                "wski18n/resources/es_ES.all.json",
        )
 }
 
-func wski18nResourcesEs_esAllJson() (*asset, error) {
-       bytes, err := wski18nResourcesEs_esAllJsonBytes()
-       if err != nil {
-               return nil, err
-       }
+var _wski18n_resources_fr_fr_all_json = 
[]byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x8a\xe6\x52\x50\xa8\xe6\x52\x50\x50\x50\x50\xca\x4c\x51\xb2\x52\x50\x4a\xaa\x2c\x48\x2c\x2e\x56\x48\x4e\x2d\x2a\xc9\x4c\xcb\x4c\x4e\x2c\x49\x55\x48\xce\x48\x4d\xce\xce\xcc\x4b\x57\xd2\x81\x28\x2c\x29\x4a\xcc\x2b\xce\x49\x2c\xc9\xcc\xcf\x03\xe9\x08\xce\xcf\x4d\x55\x40\x12\x53\xc8\xcc\x53\x70\x2b\x4a\xcd\x4b\xce\x50\xe2\x52\x50\xa8\xe5\x8a\xe5\x02\x04\x00\x00\xff\xff\x45\xa4\xe9\x62\x65\x00\x00\x00")
 
-       info := bindataFileInfo{name: "wski18n/resources/es_ES.all.json", size: 
0, mode: os.FileMode(420), modTime: time.Unix(1515697090, 0)}
-       a := &asset{bytes: bytes, info: info}
-       return a, nil
-}
-
-var _wski18nResourcesFr_frAllJson = 
[]byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x8a\xe6\x52\x50\xa8\xe6\x52\x50\x50\x50\x50\xca\x4c\x51\xb2\x52\x50\x4a\xaa\x2c\x48\x2c\x2e\x56\x48\x4e\x2d\x2a\xc9\x4c\xcb\x4c\x4e\x2c\x49\x55\x48\xce\x48\x4d\xce\xce\xcc\x4b\x57\xd2\x81\x28\x2c\x29\x4a\xcc\x2b\xce\x49\x2c\xc9\xcc\xcf\x03\xe9\x08\xce\xcf\x4d\x55\x40\x12\x53\xc8\xcc\x53\x70\x2b\x4a\xcd\x4b\xce\x50\xe2\x52\x50\xa8\xe5\x8a\xe5\x02\x04\x00\x00\xff\xff\x45\xa4\xe9\x62\x65\x00\x00\x00")
-
-func wski18nResourcesFr_frAllJsonBytes() ([]byte, error) {
-       return bindataRead(
-               _wski18nResourcesFr_frAllJson,
+func wski18n_resources_fr_fr_all_json() ([]byte, error) {
+       return bindata_read(
+               _wski18n_resources_fr_fr_all_json,
                "wski18n/resources/fr_FR.all.json",
        )
 }
 
-func wski18nResourcesFr_frAllJson() (*asset, error) {
-       bytes, err := wski18nResourcesFr_frAllJsonBytes()
-       if err != nil {
-               return nil, err
-       }
-
-       info := bindataFileInfo{name: "wski18n/resources/fr_FR.all.json", size: 
101, mode: os.FileMode(420), modTime: time.Unix(1515697090, 0)}
-       a := &asset{bytes: bytes, info: info}
-       return a, nil
-}
-
-var _wski18nResourcesIt_itAllJson = 
[]byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x01\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00")
+var _wski18n_resources_it_it_all_json = 
[]byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x01\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00")
 
-func wski18nResourcesIt_itAllJsonBytes() ([]byte, error) {
-       return bindataRead(
-               _wski18nResourcesIt_itAllJson,
+func wski18n_resources_it_it_all_json() ([]byte, error) {
+       return bindata_read(
+               _wski18n_resources_it_it_all_json,
                "wski18n/resources/it_IT.all.json",
        )
 }
 
-func wski18nResourcesIt_itAllJson() (*asset, error) {
-       bytes, err := wski18nResourcesIt_itAllJsonBytes()
-       if err != nil {
-               return nil, err
-       }
-
-       info := bindataFileInfo{name: "wski18n/resources/it_IT.all.json", size: 
0, mode: os.FileMode(420), modTime: time.Unix(1515697090, 0)}
-       a := &asset{bytes: bytes, info: info}
-       return a, nil
-}
-
-var _wski18nResourcesJa_jaAllJson = 
[]byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x01\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00")
+var _wski18n_resources_ja_ja_all_json = 
[]byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x01\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00")
 
-func wski18nResourcesJa_jaAllJsonBytes() ([]byte, error) {
-       return bindataRead(
-               _wski18nResourcesJa_jaAllJson,
+func wski18n_resources_ja_ja_all_json() ([]byte, error) {
+       return bindata_read(
+               _wski18n_resources_ja_ja_all_json,
                "wski18n/resources/ja_JA.all.json",
        )
 }
 
-func wski18nResourcesJa_jaAllJson() (*asset, error) {
-       bytes, err := wski18nResourcesJa_jaAllJsonBytes()
-       if err != nil {
-               return nil, err
-       }
-
-       info := bindataFileInfo{name: "wski18n/resources/ja_JA.all.json", size: 
0, mode: os.FileMode(420), modTime: time.Unix(1515697090, 0)}
-       a := &asset{bytes: bytes, info: info}
-       return a, nil
-}
+var _wski18n_resources_ko_kr_all_json = 
[]byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x01\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00")
 
-var _wski18nResourcesKo_krAllJson = 
[]byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x01\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00")
-
-func wski18nResourcesKo_krAllJsonBytes() ([]byte, error) {
-       return bindataRead(
-               _wski18nResourcesKo_krAllJson,
+func wski18n_resources_ko_kr_all_json() ([]byte, error) {
+       return bindata_read(
+               _wski18n_resources_ko_kr_all_json,
                "wski18n/resources/ko_KR.all.json",
        )
 }
 
-func wski18nResourcesKo_krAllJson() (*asset, error) {
-       bytes, err := wski18nResourcesKo_krAllJsonBytes()
-       if err != nil {
-               return nil, err
-       }
-
-       info := bindataFileInfo{name: "wski18n/resources/ko_KR.all.json", size: 
0, mode: os.FileMode(420), modTime: time.Unix(1515697090, 0)}
-       a := &asset{bytes: bytes, info: info}
-       return a, nil
-}
-
-var _wski18nResourcesPt_brAllJson = 
[]byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x01\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00")
+var _wski18n_resources_pt_br_all_json = 
[]byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x01\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00")
 
-func wski18nResourcesPt_brAllJsonBytes() ([]byte, error) {
-       return bindataRead(
-               _wski18nResourcesPt_brAllJson,
+func wski18n_resources_pt_br_all_json() ([]byte, error) {
+       return bindata_read(
+               _wski18n_resources_pt_br_all_json,
                "wski18n/resources/pt_BR.all.json",
        )
 }
 
-func wski18nResourcesPt_brAllJson() (*asset, error) {
-       bytes, err := wski18nResourcesPt_brAllJsonBytes()
-       if err != nil {
-               return nil, err
-       }
+var _wski18n_resources_zh_hans_all_json = 
[]byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x01\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00")
 
-       info := bindataFileInfo{name: "wski18n/resources/pt_BR.all.json", size: 
0, mode: os.FileMode(420), modTime: time.Unix(1515697090, 0)}
-       a := &asset{bytes: bytes, info: info}
-       return a, nil
-}
-
-var _wski18nResourcesZh_hansAllJson = 
[]byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x01\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00")
-
-func wski18nResourcesZh_hansAllJsonBytes() ([]byte, error) {
-       return bindataRead(
-               _wski18nResourcesZh_hansAllJson,
+func wski18n_resources_zh_hans_all_json() ([]byte, error) {
+       return bindata_read(
+               _wski18n_resources_zh_hans_all_json,
                "wski18n/resources/zh_Hans.all.json",
        )
 }
 
-func wski18nResourcesZh_hansAllJson() (*asset, error) {
-       bytes, err := wski18nResourcesZh_hansAllJsonBytes()
-       if err != nil {
-               return nil, err
-       }
-
-       info := bindataFileInfo{name: "wski18n/resources/zh_Hans.all.json", 
size: 0, mode: os.FileMode(420), modTime: time.Unix(1515697090, 0)}
-       a := &asset{bytes: bytes, info: info}
-       return a, nil
-}
-
-var _wski18nResourcesZh_hantAllJson = 
[]byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x01\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00")
+var _wski18n_resources_zh_hant_all_json = 
[]byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x01\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00")
 
-func wski18nResourcesZh_hantAllJsonBytes() ([]byte, error) {
-       return bindataRead(
-               _wski18nResourcesZh_hantAllJson,
+func wski18n_resources_zh_hant_all_json() ([]byte, error) {
+       return bindata_read(
+               _wski18n_resources_zh_hant_all_json,
                "wski18n/resources/zh_Hant.all.json",
        )
 }
 
-func wski18nResourcesZh_hantAllJson() (*asset, error) {
-       bytes, err := wski18nResourcesZh_hantAllJsonBytes()
-       if err != nil {
-               return nil, err
-       }
-
-       info := bindataFileInfo{name: "wski18n/resources/zh_Hant.all.json", 
size: 0, mode: os.FileMode(420), modTime: time.Unix(1515697090, 0)}
-       a := &asset{bytes: bytes, info: info}
-       return a, nil
-}
-
 // Asset loads and returns the asset for the given name.
 // It returns an error if the asset could not be found or
 // could not be loaded.
 func Asset(name string) ([]byte, error) {
        cannonicalName := strings.Replace(name, "\\", "/", -1)
        if f, ok := _bindata[cannonicalName]; ok {
-               a, err := f()
-               if err != nil {
-                       return nil, fmt.Errorf("Asset %s can't read by error: 
%v", name, err)
-               }
-               return a.bytes, nil
+               return f()
        }
        return nil, fmt.Errorf("Asset %s not found", name)
 }
 
-// MustAsset is like Asset but panics when Asset would return an error.
-// It simplifies safe initialization of global variables.
-func MustAsset(name string) []byte {
-       a, err := Asset(name)
-       if err != nil {
-               panic("asset: Asset(" + name + "): " + err.Error())
-       }
-
-       return a
-}
-
-// AssetInfo loads and returns the asset info for the given name.
-// It returns an error if the asset could not be found or
-// could not be loaded.
-func AssetInfo(name string) (os.FileInfo, error) {
-       cannonicalName := strings.Replace(name, "\\", "/", -1)
-       if f, ok := _bindata[cannonicalName]; ok {
-               a, err := f()
-               if err != nil {
-                       return nil, fmt.Errorf("AssetInfo %s can't read by 
error: %v", name, err)
-               }
-               return a.info, nil
-       }
-       return nil, fmt.Errorf("AssetInfo %s not found", name)
-}
-
 // AssetNames returns the names of the assets.
 func AssetNames() []string {
        names := make([]string, 0, len(_bindata))
@@ -328,19 +136,18 @@ func AssetNames() []string {
 }
 
 // _bindata is a table, holding each asset generator, mapped to its name.
-var _bindata = map[string]func() (*asset, error){
-       "wski18n/resources/de_DE.all.json":   wski18nResourcesDe_deAllJson,
-       "wski18n/resources/en_US.all.json":   wski18nResourcesEn_usAllJson,
-       "wski18n/resources/es_ES.all.json":   wski18nResourcesEs_esAllJson,
-       "wski18n/resources/fr_FR.all.json":   wski18nResourcesFr_frAllJson,
-       "wski18n/resources/it_IT.all.json":   wski18nResourcesIt_itAllJson,
-       "wski18n/resources/ja_JA.all.json":   wski18nResourcesJa_jaAllJson,
-       "wski18n/resources/ko_KR.all.json":   wski18nResourcesKo_krAllJson,
-       "wski18n/resources/pt_BR.all.json":   wski18nResourcesPt_brAllJson,
-       "wski18n/resources/zh_Hans.all.json": wski18nResourcesZh_hansAllJson,
-       "wski18n/resources/zh_Hant.all.json": wski18nResourcesZh_hantAllJson,
+var _bindata = map[string]func() ([]byte, error){
+       "wski18n/resources/de_DE.all.json": wski18n_resources_de_de_all_json,
+       "wski18n/resources/en_US.all.json": wski18n_resources_en_us_all_json,
+       "wski18n/resources/es_ES.all.json": wski18n_resources_es_es_all_json,
+       "wski18n/resources/fr_FR.all.json": wski18n_resources_fr_fr_all_json,
+       "wski18n/resources/it_IT.all.json": wski18n_resources_it_it_all_json,
+       "wski18n/resources/ja_JA.all.json": wski18n_resources_ja_ja_all_json,
+       "wski18n/resources/ko_KR.all.json": wski18n_resources_ko_kr_all_json,
+       "wski18n/resources/pt_BR.all.json": wski18n_resources_pt_br_all_json,
+       "wski18n/resources/zh_Hans.all.json": 
wski18n_resources_zh_hans_all_json,
+       "wski18n/resources/zh_Hant.all.json": 
wski18n_resources_zh_hant_all_json,
 }
-
 // AssetDir returns the file names below a certain
 // directory embedded in the file by go-bindata.
 // For example if you run go-bindata on data/... and data contains the
@@ -370,77 +177,39 @@ func AssetDir(name string) ([]string, error) {
                return nil, fmt.Errorf("Asset %s not found", name)
        }
        rv := make([]string, 0, len(node.Children))
-       for childName := range node.Children {
-               rv = append(rv, childName)
+       for name := range node.Children {
+               rv = append(rv, name)
        }
        return rv, nil
 }
 
-type bintree struct {
-       Func     func() (*asset, error)
-       Children map[string]*bintree
-}
-
-var _bintree = &bintree{nil, map[string]*bintree{
-       "wski18n": &bintree{nil, map[string]*bintree{
-               "resources": &bintree{nil, map[string]*bintree{
-                       "de_DE.all.json":   
&bintree{wski18nResourcesDe_deAllJson, map[string]*bintree{}},
-                       "en_US.all.json":   
&bintree{wski18nResourcesEn_usAllJson, map[string]*bintree{}},
-                       "es_ES.all.json":   
&bintree{wski18nResourcesEs_esAllJson, map[string]*bintree{}},
-                       "fr_FR.all.json":   
&bintree{wski18nResourcesFr_frAllJson, map[string]*bintree{}},
-                       "it_IT.all.json":   
&bintree{wski18nResourcesIt_itAllJson, map[string]*bintree{}},
-                       "ja_JA.all.json":   
&bintree{wski18nResourcesJa_jaAllJson, map[string]*bintree{}},
-                       "ko_KR.all.json":   
&bintree{wski18nResourcesKo_krAllJson, map[string]*bintree{}},
-                       "pt_BR.all.json":   
&bintree{wski18nResourcesPt_brAllJson, map[string]*bintree{}},
-                       "zh_Hans.all.json": 
&bintree{wski18nResourcesZh_hansAllJson, map[string]*bintree{}},
-                       "zh_Hant.all.json": 
&bintree{wski18nResourcesZh_hantAllJson, map[string]*bintree{}},
+type _bintree_t struct {
+       Func func() ([]byte, error)
+       Children map[string]*_bintree_t
+}
+var _bintree = &_bintree_t{nil, map[string]*_bintree_t{
+       "wski18n": &_bintree_t{nil, map[string]*_bintree_t{
+               "resources": &_bintree_t{nil, map[string]*_bintree_t{
+                       "de_DE.all.json": 
&_bintree_t{wski18n_resources_de_de_all_json, map[string]*_bintree_t{
+                       }},
+                       "en_US.all.json": 
&_bintree_t{wski18n_resources_en_us_all_json, map[string]*_bintree_t{
+                       }},
+                       "es_ES.all.json": 
&_bintree_t{wski18n_resources_es_es_all_json, map[string]*_bintree_t{
+                       }},
+                       "fr_FR.all.json": 
&_bintree_t{wski18n_resources_fr_fr_all_json, map[string]*_bintree_t{
+                       }},
+                       "it_IT.all.json": 
&_bintree_t{wski18n_resources_it_it_all_json, map[string]*_bintree_t{
+                       }},
+                       "ja_JA.all.json": 
&_bintree_t{wski18n_resources_ja_ja_all_json, map[string]*_bintree_t{
+                       }},
+                       "ko_KR.all.json": 
&_bintree_t{wski18n_resources_ko_kr_all_json, map[string]*_bintree_t{
+                       }},
+                       "pt_BR.all.json": 
&_bintree_t{wski18n_resources_pt_br_all_json, map[string]*_bintree_t{
+                       }},
+                       "zh_Hans.all.json": 
&_bintree_t{wski18n_resources_zh_hans_all_json, map[string]*_bintree_t{
+                       }},
+                       "zh_Hant.all.json": 
&_bintree_t{wski18n_resources_zh_hant_all_json, map[string]*_bintree_t{
+                       }},
                }},
        }},
 }}
-
-// RestoreAsset restores an asset under the given directory
-func RestoreAsset(dir, name string) error {
-       data, err := Asset(name)
-       if err != nil {
-               return err
-       }
-       info, err := AssetInfo(name)
-       if err != nil {
-               return err
-       }
-       err = os.MkdirAll(_filePath(dir, filepath.Dir(name)), os.FileMode(0755))
-       if err != nil {
-               return err
-       }
-       err = ioutil.WriteFile(_filePath(dir, name), data, info.Mode())
-       if err != nil {
-               return err
-       }
-       err = os.Chtimes(_filePath(dir, name), info.ModTime(), info.ModTime())
-       if err != nil {
-               return err
-       }
-       return nil
-}
-
-// RestoreAssets restores an asset under the given directory recursively
-func RestoreAssets(dir, name string) error {
-       children, err := AssetDir(name)
-       // File
-       if err != nil {
-               return RestoreAsset(dir, name)
-       }
-       // Dir
-       for _, child := range children {
-               err = RestoreAssets(dir, filepath.Join(name, child))
-               if err != nil {
-                       return err
-               }
-       }
-       return nil
-}
-
-func _filePath(dir, name string) string {
-       cannonicalName := strings.Replace(name, "\\", "/", -1)
-       return filepath.Join(append([]string{dir}, 
strings.Split(cannonicalName, "/")...)...)
-}
diff --git a/wski18n/resources/en_US.all.json b/wski18n/resources/en_US.all.json
index 27dab3a..9d239b5 100644
--- a/wski18n/resources/en_US.all.json
+++ b/wski18n/resources/en_US.all.json
@@ -21,7 +21,7 @@
   },
   {
     "id": "msg_cmd_desc_long_root",
-    "translation": "The OpenWhisk Deployment Tool\n\nTo begin working with 
wskdeploy, run the 'wskdeploy' command:\n\n\t$ wskdeploy\n\nThis will deploy 
OpenWhisk assets specified in manifest.yaml or manifest.yml\nIf current 
directory doesnt have any manifest file, wskdeploy prints this help 
message.\n\nThe most common ways of using wskdeploy to deploy and/or undeploy 
OpenWhisk assets are:\n\n$ wskdeploy\n$ wskdeploy -m path/to/manifest.yaml\n$ 
wskdeploy -m path/to/manifest.yaml -d path/t [...]
+    "translation": "The OpenWhisk Deployment Tool\n\nTo begin working with 
wskdeploy, run the 'wskdeploy' command:\n\n\t$ wskdeploy\n\nThis will deploy 
OpenWhisk assets specified in manifest.yaml or manifest.yml\nIf current 
directory doesnt have any manifest file, wskdeploy prints this help 
message.\n\nThe most common ways of using wskdeploy to deploy and/or undeploy 
OpenWhisk assets are:\n\n$ wskdeploy\n$ wskdeploy -m path/to/manifest.yaml\n$ 
wskdeploy -m path/to/manifest.yaml -d path/t [...]
   },
   {
     "id": "msg_cmd_desc_short_report",
@@ -181,7 +181,7 @@
   },
   {
     "id": "msg_deployment_failed",
-    "translation": "Deployment did not complete sucessfully. Run `wskdeploy 
undeploy` to remove partially deployed assets.\n"
+    "translation": "Deployment did not complete successfully. Run `wskdeploy 
undeploy` to remove partially deployed assets.\n"
   },
   {
     "id": "msg_deployment_report_status",
@@ -197,7 +197,7 @@
   },
   {
     "id": "msg_undeployment_failed",
-    "translation": "Undeployment did not complete sucessfully.\n"
+    "translation": "Undeployment did not complete successfully.\n"
   },
   {
     "id": "msg_undeployment_succeeded",
@@ -233,7 +233,7 @@
   },
   {
     "id": "msg_dependency_deployment_failure",
-    "translation": "Deployment of dependency [{{.name}}] did not complete 
sucessfully. Run `wskdeploy undeploy` to remove partially deployed assets.\n"
+    "translation": "Deployment of dependency [{{.name}}] did not complete 
successfully. Run `wskdeploy undeploy` to remove partially deployed assets.\n"
   },
   {
     "id": "msg_dependency_undeploying",
@@ -245,11 +245,11 @@
   },
   {
     "id": "msg_dependency_undeployment_failure",
-    "translation": "Undeployment of dependency [{{.name}}] did not complete 
sucessfully.\n"
+    "translation": "Undeployment of dependency [{{.name}}] did not complete 
successfully.\n"
   },
   {
     "id": "msg_managed_undeployment_failed",
-    "translation": "Undeployment of deleted entities did not complete 
sucessfully during managed deployment. Run `wskdeploy undeploy` to remove 
partially deployed assets.\n"
+    "translation": "Undeployment of deleted entities did not complete 
successfully during managed deployment. Run `wskdeploy undeploy` to remove 
partially deployed assets.\n"
   },
   {
     "id": "msg_managed_found_deleted_entity",
