Repository: incubator-hawq Updated Branches: refs/heads/master 355c43704 -> 195909207
HAWQ-778. Fix typos in hawq register Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/19590920 Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/19590920 Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/19590920 Branch: refs/heads/master Commit: 195909207313d802a110666c76187cefab96d09c Parents: 035b48b Author: Yancheng Luo <[email protected]> Authored: Tue Jun 7 10:59:33 2016 +0800 Committer: Lili Ma <[email protected]> Committed: Tue Jun 7 11:03:56 2016 +0800 ---------------------------------------------------------------------- tools/bin/hawqregister | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/19590920/tools/bin/hawqregister ---------------------------------------------------------------------- diff --git a/tools/bin/hawqregister b/tools/bin/hawqregister index fe979a9..380a548 100755 --- a/tools/bin/hawqregister +++ b/tools/bin/hawqregister @@ -162,7 +162,7 @@ def check_files_and_table_in_same_hdfs_cluster(filepath, tabledir): tableroot = tabledir.split('/') # check the root url of them. 
eg: for 'hdfs://localhost:8020/temp/tempfile', we check 'hdfs://localhost:8020' if fileroot[0] != tableroot[0] or fileroot[1] != tableroot[1] or fileroot[2] != tableroot[2]: - logger.error("Files to be registered and the table are not in the same hdfs cluster.\n Files to be registered are '%s'\n Table path in HDFS is '%s'" % (filepath, tabledir)) + logger.error("Files to be registered and the table are not in the same hdfs cluster.\nFile(s) to be registered: '%s'\nTable path in HDFS: '%s'" % (filepath, tabledir)) sys.exit(1) @@ -216,7 +216,7 @@ def move_files_in_hdfs(options, databasename, tablename, files, firstsegno, tabl segno += 1 if srcfile != dstfile: hdfscmd = "hadoop fs -mv %s %s" % (srcfile, dstfile) - sys.stdout.write("hafscmd: '%s'\n" % hdfscmd) + sys.stdout.write("hdfscmd: '%s'\n" % hdfscmd) result = local_ssh(hdfscmd) if result != 0: logger.error("Fail to move '%s' to '%s'" % (srcfile, dstfile)) @@ -229,7 +229,7 @@ def move_files_in_hdfs(options, databasename, tablename, files, firstsegno, tabl segno += 1 if srcfile != dstfile: hdfscmd = "hadoop fs -mv %s %s" % (srcfile, dstfile) - sys.stdout.write("hafscmd: '%s'\n" % hdfscmd) + sys.stdout.write("hdfscmd: '%s'\n" % hdfscmd) result = local_ssh(hdfscmd) if result != 0: logger.error("Fail to move '%s' to '%s'" % (srcfile, dstfile))
