This is an automated email from the ASF dual-hosted git repository.
liuxun pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/gravitino-playground.git
The following commit(s) were added to refs/heads/main by this push:
new 4b4c13b Fix Playground fileset demo issue and update the readme (#55)
4b4c13b is described below
commit 4b4c13b6537e99273cb293aa8f88b98f64eaeafb
Author: Jerry Shao <[email protected]>
AuthorDate: Fri Jul 19 11:14:23 2024 +0800
Fix Playground fileset demo issue and update the readme (#55)
This PR tries to fix two things:
1. Fix the playground fileset demo issue to make it work.
2. Update the README to remove the support of several arguments that are
misleading.
---
README.md | 21 +++------------------
init/jupyter/gravitino-fileset-example.ipynb | 8 ++++----
launch-playground.sh | 8 +-------
3 files changed, 8 insertions(+), 29 deletions(-)
diff --git a/README.md b/README.md
index 41ec2ff..9ae2272 100644
--- a/README.md
+++ b/README.md
@@ -49,24 +49,6 @@ cd gravitino-playground
./launch-playground.sh
```
-### Launch BigData components of playground
-```shell
-git clone [email protected]:datastrato/gravitino-playground.git
-cd gravitino-playground
-./launch-playground.sh bigdata
-# equivalent to
-./launch-playground.sh hive gravitino trino postgresql mysql spark
-```
-
-### Launch AI components of playground
-```shell
-git clone [email protected]:datastrato/gravitino-playground.git
-cd gravitino-playground
-./launch-playground.sh ai
-# equivalent to
-./launch-playground.sh hive gravitino mysql jupyter
-```
-
### Launch special component or components of playground
```shell
git clone [email protected]:datastrato/gravitino-playground.git
@@ -74,6 +56,9 @@ cd gravitino-playground
./launch-playground.sh hive|gravitino|trino|postgresql|mysql|spark|jupyter
```
+Note. Components have dependencies, only launching one or several components
cannot experience
+the full functionality of the playground.
+
## Experiencing Apache Gravitino with Trino SQL
### Using Trino CLI in Docker Container
diff --git a/init/jupyter/gravitino-fileset-example.ipynb
b/init/jupyter/gravitino-fileset-example.ipynb
index 41e7256..60c3ae2 100644
--- a/init/jupyter/gravitino-fileset-example.ipynb
+++ b/init/jupyter/gravitino-fileset-example.ipynb
@@ -35,7 +35,7 @@
"metadata": {},
"outputs": [],
"source": [
- "pip install gravitino"
+ "pip install gravitino==0.5.1"
]
},
{
@@ -98,7 +98,7 @@
"catalog_ident=NameIdentifier.of_catalog(metalake_name, catalog_name)\n",
"\n",
"catalog = gravitino_client.create_catalog(ident=catalog_ident,\n",
- " type=Catalog.Type.FILESET,\n",
+ "
catalog_type=Catalog.Type.FILESET,\n",
" provider=\"hadoop\", \n",
" comment=\"\",\n",
" properties={})\n",
@@ -157,7 +157,7 @@
"\n",
"managed_fileset_ident: NameIdentifier =
NameIdentifier.of_fileset(metalake_name, catalog_name, schema_name,
managed_fileset_name)\n",
"catalog.as_fileset_catalog().create_fileset(ident=managed_fileset_ident,\n",
- " type=Fileset.Type.MANAGED,\n",
+ "
fileset_type=Fileset.Type.MANAGED,\n",
" comment=\"\",\n",
"
storage_location=managed_fileset_hdfs_path,\n",
" properties={})\n",
@@ -194,7 +194,7 @@
"# Create a external type of fileset\n",
"external_fileset_ident: NameIdentifier =
NameIdentifier.of_fileset(metalake_name, catalog_name, schema_name,
external_fileset_name)\n",
"catalog.as_fileset_catalog().create_fileset(ident=external_fileset_ident,\n",
- "
type=Fileset.Type.EXTERNAL,\n",
+ "
fileset_type=Fileset.Type.EXTERNAL,\n",
" comment=\"\",\n",
"
storage_location=external_fileset_hdfs_path,\n",
" properties={})"
diff --git a/launch-playground.sh b/launch-playground.sh
index 1993bed..1c220cb 100755
--- a/launch-playground.sh
+++ b/launch-playground.sh
@@ -31,12 +31,6 @@ fi
components=""
case "${1}" in
- bigdata)
- components="hive gravitino trino postgresql mysql spark"
- ;;
- ai)
- components="hive gravitino mysql jupyter"
- ;;
*)
components=$@
esac
@@ -45,4 +39,4 @@ cd ${playground_dir}
docker-compose up ${components}
# Clean Docker containers when you quit this script
-docker-compose down
\ No newline at end of file
+docker-compose down