ajantha-bhat commented on a change in pull request #4034:
URL: https://github.com/apache/carbondata/pull/4034#discussion_r553975760



##########
File path: integration/presto/src/main/prestosql/org/apache/carbondata/presto/CarbondataConnectorFactory.java
##########
@@ -101,127 +52,109 @@
     }
   }
 
-  public CarbondataConnectorFactory(String connectorName, ClassLoader classLoader) {
-    super(connectorName, classLoader, Optional.empty());
-    this.classLoader = requireNonNull(classLoader, "classLoader is null");
+  public CarbondataConnectorFactory(String name) {
+    this(name, EmptyModule.class);
+  }
+
+  public CarbondataConnectorFactory(String connectorName, Class<? extends Module> module) {
+    super(connectorName, module);
+    this.module = module;
   }
 
+
   @Override
   public Connector create(String catalogName, Map<String, String> config,
       ConnectorContext context) {
-    requireNonNull(config, "config is null");
-
-    try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(classLoader)) {
-      Bootstrap app = new Bootstrap(
-          new EventModule(),
-          new MBeanModule(),
-          new ConnectorObjectNameGeneratorModule(catalogName),
-          new JsonModule(),
-          new CarbondataModule(catalogName),
-          new HiveS3Module(),
-          new HiveGcsModule(),
-          new HiveMetastoreModule(Optional.ofNullable(null)),
-          new HiveSecurityModule(),
-          new HiveAuthenticationModule(),
-          new HiveProcedureModule(),
-          new MBeanServerModule(),
-          binder -> {
-            binder.bind(NodeVersion.class).toInstance(
-                new NodeVersion(context.getNodeManager().getCurrentNode().getVersion()));
-            binder.bind(NodeManager.class).toInstance(context.getNodeManager());
-            binder.bind(VersionEmbedder.class).toInstance(context.getVersionEmbedder());
-            binder.bind(TypeManager.class).toInstance(context.getTypeManager());
-            binder.bind(PageIndexerFactory.class).toInstance(context.getPageIndexerFactory());
-            binder.bind(PageSorter.class).toInstance(context.getPageSorter());
-            binder.bind(HiveCatalogName.class).toInstance(new HiveCatalogName(catalogName));
-            configBinder(binder).bindConfig(CarbonTableConfig.class);
-          });
-
-      Injector injector = app
-          .strictConfig()
-          .doNotInitializeLogging()
-          .setRequiredConfigurationProperties(config)
-          .initialize();
-
-      LifeCycleManager lifeCycleManager = injector.getInstance(LifeCycleManager.class);
-      HiveMetadataFactory metadataFactory = injector.getInstance(HiveMetadataFactory.class);
-      HiveTransactionManager transactionManager =
-          injector.getInstance(HiveTransactionManager.class);
-      ConnectorSplitManager splitManager = injector.getInstance(ConnectorSplitManager.class);
-      ConnectorPageSourceProvider connectorPageSource =
-          injector.getInstance(ConnectorPageSourceProvider.class);
-      ConnectorPageSinkProvider pageSinkProvider =
-          injector.getInstance(ConnectorPageSinkProvider.class);
-      ConnectorNodePartitioningProvider connectorDistributionProvider =
-          injector.getInstance(ConnectorNodePartitioningProvider.class);
-      HiveSessionProperties hiveSessionProperties =
-          injector.getInstance(HiveSessionProperties.class);
-      HiveTableProperties hiveTableProperties = injector.getInstance(HiveTableProperties.class);
-      HiveAnalyzeProperties hiveAnalyzeProperties =
-          injector.getInstance(HiveAnalyzeProperties.class);
-      ConnectorAccessControl accessControl =
-          new SystemTableAwareAccessControl(injector.getInstance(ConnectorAccessControl.class));
-      Set<Procedure> procedures = injector.getInstance(Key.get(new TypeLiteral<Set<Procedure>>() {
-      }));
-
-      return new HiveConnector(lifeCycleManager, metadataFactory, transactionManager,
-          new ClassLoaderSafeConnectorSplitManager(splitManager, classLoader),
-          new ClassLoaderSafeConnectorPageSourceProvider(connectorPageSource, classLoader),
-          new ClassLoaderSafeConnectorPageSinkProvider(pageSinkProvider, classLoader),
-          new ClassLoaderSafeNodePartitioningProvider(connectorDistributionProvider, classLoader),
-          ImmutableSet.of(), procedures, hiveSessionProperties.getSessionProperties(),
-          HiveSchemaProperties.SCHEMA_PROPERTIES, hiveTableProperties.getTableProperties(),
-          hiveAnalyzeProperties.getAnalyzeProperties(), accessControl, classLoader);
-    } catch (Exception e) {
-      throwIfUnchecked(e);
+    ClassLoader classLoader = context.duplicatePluginClassLoader();
+    try {
+      Object moduleInstance = classLoader.loadClass(this.module.getName()).getConstructor().newInstance();
+      Class<?> moduleClass = classLoader.loadClass(Module.class.getName());
+      return (Connector) classLoader.loadClass(InternalCarbonDataConnectorFactory.class.getName())
+          .getMethod("createConnector", String.class, Map.class, ConnectorContext.class, moduleClass)
+          .invoke(null, catalogName, config, context, moduleInstance);
+    }
+    catch (InvocationTargetException e) {
+      Throwable targetException = e.getTargetException();
+      throwIfUnchecked(targetException);
+      throw new RuntimeException(targetException);
+    }
+    catch (ReflectiveOperationException e) {
       throw new RuntimeException(e);
     }
   }
 
   /**
    * Set the Carbon format enum to HiveStorageFormat, its a hack but for time being it is best
    * choice to avoid lot of code change.
+   *
+   * @throws Exception

Review comment:
      It is good to have a proper javadoc. Also, the method doesn't have any params but still throws an exception, so that is recorded in the javadoc. You can observe the same in the old code.
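      For illustration only, a javadoc along these lines would cover it; the method name and body below are hypothetical placeholders rather than the PR's code, and only the @throws tag mirrors the diff:

  /**
   * Sets the Carbon format enum into HiveStorageFormat. It is a hack, but for the time
   * being it is the best choice to avoid a lot of code change.
   *
   * <p>The method takes no parameters, so the javadoc only records the checked exception
   * that the reflective enum manipulation can raise.
   *
   * @throws Exception if the HiveStorageFormat enum cannot be extended via reflection
   */
  private static void setCarbonFormatEnumSketch() throws Exception {
    // reflective enum manipulation would go here; omitted in this sketch
  }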



