Github user jackylk commented on a diff in the pull request:

    https://github.com/apache/carbondata/pull/2440#discussion_r200026300
  
    --- Diff: store/core/src/main/java/org/apache/carbondata/store/master/Master.java ---
    @@ -0,0 +1,530 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *    http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +
    +package org.apache.carbondata.store.master;
    +
    +import java.io.File;
    +import java.io.IOException;
    +import java.lang.ref.SoftReference;
    +import java.net.BindException;
    +import java.util.ArrayList;
    +import java.util.HashMap;
    +import java.util.List;
    +import java.util.Map;
    +import java.util.Objects;
    +import java.util.Random;
    +import java.util.Set;
    +import java.util.UUID;
    +import java.util.concurrent.ExecutionException;
    +import java.util.concurrent.Future;
    +import java.util.concurrent.TimeUnit;
    +import java.util.concurrent.TimeoutException;
    +
    +import org.apache.carbondata.common.logging.LogService;
    +import org.apache.carbondata.common.logging.LogServiceFactory;
    +import org.apache.carbondata.core.constants.CarbonCommonConstants;
    +import org.apache.carbondata.core.datastore.block.Distributable;
    +import org.apache.carbondata.core.datastore.impl.FileFactory;
    +import org.apache.carbondata.core.datastore.row.CarbonRow;
    +import org.apache.carbondata.core.exception.InvalidConfigurationException;
    +import org.apache.carbondata.core.fileoperations.FileWriteOperation;
    +import org.apache.carbondata.core.locks.CarbonLockUtil;
    +import org.apache.carbondata.core.locks.ICarbonLock;
    +import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
    +import org.apache.carbondata.core.metadata.SegmentFileStore;
    +import org.apache.carbondata.core.metadata.converter.SchemaConverter;
    +import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl;
    +import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
    +import org.apache.carbondata.core.metadata.schema.table.TableInfo;
    +import org.apache.carbondata.core.mutate.CarbonUpdateUtil;
    +import org.apache.carbondata.core.scan.expression.Expression;
    +import org.apache.carbondata.core.statusmanager.LoadMetadataDetails;
    +import org.apache.carbondata.core.statusmanager.SegmentStatus;
    +import org.apache.carbondata.core.statusmanager.SegmentStatusManager;
    +import org.apache.carbondata.core.util.CarbonProperties;
    +import org.apache.carbondata.core.util.CarbonUtil;
    +import org.apache.carbondata.core.util.path.CarbonTablePath;
    +import org.apache.carbondata.core.writer.ThriftWriter;
    +import org.apache.carbondata.hadoop.CarbonMultiBlockSplit;
    +import org.apache.carbondata.hadoop.api.CarbonInputFormat;
    +import org.apache.carbondata.hadoop.api.CarbonTableInputFormat;
    +import org.apache.carbondata.hadoop.util.CarbonInputFormatUtil;
    +import org.apache.carbondata.processing.loading.model.CarbonLoadModel;
    +import org.apache.carbondata.processing.util.CarbonLoaderUtil;
    +import org.apache.carbondata.store.conf.StoreConf;
    +import org.apache.carbondata.store.exception.ExecutionTimeoutException;
    +import org.apache.carbondata.store.exception.StoreException;
    +import org.apache.carbondata.store.rest.controller.Horizon;
    +import org.apache.carbondata.store.rpc.RegistryService;
    +import org.apache.carbondata.store.rpc.ServiceFactory;
    +import org.apache.carbondata.store.rpc.StoreService;
    +import org.apache.carbondata.store.rpc.impl.RegistryServiceImpl;
    +import org.apache.carbondata.store.rpc.impl.Status;
    +import org.apache.carbondata.store.rpc.model.BaseResponse;
    +import org.apache.carbondata.store.rpc.model.LoadDataRequest;
    +import org.apache.carbondata.store.rpc.model.QueryRequest;
    +import org.apache.carbondata.store.rpc.model.QueryResponse;
    +import org.apache.carbondata.store.rpc.model.RegisterWorkerRequest;
    +import org.apache.carbondata.store.rpc.model.RegisterWorkerResponse;
    +import org.apache.carbondata.store.rpc.model.ShutdownRequest;
    +import org.apache.carbondata.store.scheduler.Schedulable;
    +import org.apache.carbondata.store.scheduler.Scheduler;
    +import org.apache.carbondata.store.util.StoreUtil;
    +
    +import org.apache.hadoop.conf.Configuration;
    +import org.apache.hadoop.ipc.RPC;
    +import org.apache.hadoop.mapred.JobConf;
    +import org.apache.hadoop.mapreduce.InputSplit;
    +import org.apache.hadoop.mapreduce.Job;
    +
    +/**
    + * Master of CarbonSearch.
    + * It provides a Registry service for worker to register.
    + * And it provides search API to fire RPC call to workers.
    + */
    +
    +public class Master {
    +
    +  private static Master instance = null;
    +
    +  private static LogService LOGGER = LogServiceFactory.getLogService(Master.class.getName());
    +
    +  private Map<String, SoftReference<CarbonTable>> cacheTables;
    +
    +  // worker host address map to EndpointRef
    +  private StoreConf conf;
    +  private Configuration hadoopConf;
    +  private Random random = new Random();
    +  private RPC.Server registryServer = null;
    +  private Scheduler scheduler = new Scheduler();
    +
    +  // Private constructor: the static `instance` field above suggests Master is
    +  // a singleton — presumably obtained via a static accessor defined later in
    +  // the file (not visible in this diff hunk); TODO confirm.
    +  private Master(StoreConf conf) {
    +    // Table metadata cache; SoftReference values let the GC reclaim entries
    +    // under memory pressure.
    +    cacheTables = new HashMap<>();
    +    this.conf = conf;
    +    // Hadoop configuration is derived from the store configuration.
    +    this.hadoopConf = this.conf.newHadoopConf();
    +  }
    +
    +  public void start() {
    --- End diff --
    
    It seems `startService` is enough


---

Reply via email to