dianfu commented on code in PR #19919:
URL: https://github.com/apache/flink/pull/19919#discussion_r895511261


##########
flink-python/pyflink/datastream/connectors/__init__.py:
##########
@@ -16,58 +16,26 @@
 # limitations under the License.
 
################################################################################
 from pyflink.datastream.connectors.base import Sink, Source, DeliveryGuarantee
+from pyflink.datastream.connectors.number_seq import NumberSequenceSource
 from pyflink.datastream.connectors.file_system import (FileEnumeratorProvider, FileSink, FileSource,
                                                        BucketAssigner, FileSourceBuilder,
                                                        FileSplitAssignerProvider, OutputFileConfig,
                                                        RollingPolicy,
                                                        StreamFormat, StreamingFileSink)
-from pyflink.datastream.connectors.jdbc import JdbcSink, JdbcConnectionOptions, JdbcExecutionOptions
-from pyflink.datastream.connectors.kafka import FlinkKafkaConsumer, FlinkKafkaProducer, Semantic
-from pyflink.datastream.connectors.number_seq import NumberSequenceSource
-from pyflink.datastream.connectors.pulsar import PulsarDeserializationSchema, PulsarSource, \
-    PulsarSourceBuilder, SubscriptionType, StartCursor, StopCursor, PulsarSerializationSchema, \
-    PulsarSink, PulsarSinkBuilder, MessageDelayer, TopicRoutingMode
-from pyflink.datastream.connectors.rabbitmq import RMQConnectionConfig, RMQSource, RMQSink
-from pyflink.datastream.connectors.kinesis import (FlinkKinesisConsumer, KinesisStreamsSink,
-                                                   KinesisFirehoseSink)
-
 
 __all__ = [
     'Sink',
     'Source',
     'DeliveryGuarantee',
+    'NumberSequenceSource',
+    'BucketAssigner',
     'FileEnumeratorProvider',
     'FileSink',
     'FileSource',
-    'BucketAssigner',
     'FileSourceBuilder',
     'FileSplitAssignerProvider',
-    'FlinkKafkaConsumer',
-    'FlinkKafkaProducer',
-    'Semantic',
-    'JdbcSink',
-    'JdbcConnectionOptions',
-    'JdbcExecutionOptions',
-    'NumberSequenceSource',
     'OutputFileConfig',
-    'PulsarDeserializationSchema',
-    'PulsarSource',
-    'PulsarSourceBuilder',
-    'SubscriptionType',
-    'PulsarSerializationSchema',
-    'PulsarSink',
-    'PulsarSinkBuilder',
-    'MessageDelayer',
-    'TopicRoutingMode',
-    'RMQConnectionConfig',
-    'RMQSource',
-    'RMQSink',
     'RollingPolicy',

Review Comment:
   @deadwind4 Yes, of course. BTW, we should also consider backward compatibility. One approach that may work is the following:
   ```
   def _install():
       # Bind the package module itself; during its own import it is resolved
       # from sys.modules, so this works from within __init__.py.
       from pyflink.datastream import connectors
       from pyflink.datastream.connectors import jdbc
       # Re-export the class at the package root so existing imports keep working.
       setattr(connectors, 'JdbcSink', jdbc.JdbcSink)
       ...
   
   _install()
   del _install
   ```
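
   A minimal sketch of what this buys us for backward compatibility (hypothetical user code, assuming the `_install()` hook above runs in `pyflink/datastream/connectors/__init__.py`): imports written against the old flat package layout keep resolving, and point at the same class as the new submodule path.
   ```
   # Old-style import from the package root still works thanks to _install().
   from pyflink.datastream.connectors import JdbcSink
   # New-style import straight from the submodule.
   from pyflink.datastream.connectors.jdbc import JdbcSink as _JdbcSink

   assert JdbcSink is _JdbcSink  # both names refer to the same class
   ```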


