Repository: systemml

Updated Branches:
  refs/heads/master dc4bfd95e -> 81b9248fc
[MINOR] Resolve race condition between locking of metastore_db of Scala SparkSession and PySpark SparkSession when using SystemML MLContext API

Project: http://git-wip-us.apache.org/repos/asf/systemml/repo
Commit: http://git-wip-us.apache.org/repos/asf/systemml/commit/81b9248f
Tree: http://git-wip-us.apache.org/repos/asf/systemml/tree/81b9248f
Diff: http://git-wip-us.apache.org/repos/asf/systemml/diff/81b9248f

Branch: refs/heads/master
Commit: 81b9248fc700303c1542d709e18479bc57147b3a
Parents: dc4bfd9
Author: Niketan Pansare <[email protected]>
Authored: Sun Aug 13 13:42:30 2017 -0700
Committer: Niketan Pansare <[email protected]>
Committed: Sun Aug 13 13:49:33 2017 -0700

----------------------------------------------------------------------
 src/main/python/systemml/mlcontext.py | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/systemml/blob/81b9248f/src/main/python/systemml/mlcontext.py
----------------------------------------------------------------------
diff --git a/src/main/python/systemml/mlcontext.py b/src/main/python/systemml/mlcontext.py
index 5841ab5..60705c5 100644
--- a/src/main/python/systemml/mlcontext.py
+++ b/src/main/python/systemml/mlcontext.py
@@ -26,19 +26,26 @@ util_methods = [ 'jvm_stdout', '_java2py', 'getHopDAG' ]
 __all__ = ['MLResults', 'MLContext', 'Script', 'Matrix' ] + script_factory_methods + util_methods
 import os
-
+import numpy as np
+import pandas as pd
+import threading, time
+
 try:
     import py4j.java_gateway
     from py4j.java_gateway import JavaObject
     from pyspark import SparkContext
     from pyspark.conf import SparkConf
     import pyspark.mllib.common
+    # -----------------------------------------------------------------------------------
+    # Avoids race condition between locking of metastore_db of Scala SparkSession and PySpark SparkSession
+    from pyspark.sql import SparkSession
+    SparkSession.builder.getOrCreate().createDataFrame(pd.DataFrame(np.array([[1,2],[3,4]])))
+    # -----------------------------------------------------------------------------------
 except ImportError:
     raise ImportError('Unable to import `pyspark`. Hint: Make sure you are running with PySpark.')
 
 from .converters import *
 from .classloader import *
-import threading, time
 
 _loadedSystemML = False
 
 def _get_spark_context():
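For context, a minimal usage sketch of the MLContext API from PySpark under the ordering this change enforces: importing the systemml package now eagerly creates the Python-side SparkSession (and thus takes the Derby metastore_db lock) before the Scala-side MLContext touches it. The DML script and variable names below are illustrative assumptions, not part of this commit.

# Minimal sketch (assumed usage, not from this commit). Run with PySpark so that
# importing systemml triggers the eager SparkSession creation shown in the diff above.
from pyspark.sql import SparkSession
from systemml import MLContext, dml

spark = SparkSession.builder.getOrCreate()  # same Python-side session created at import time
ml = MLContext(spark)

# Illustrative DML script; the output variable "s" is a hypothetical example.
prog = dml("s = 'Hello World!'").output("s")
print(ml.execute(prog).get("s"))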
