quanghuynguyen1902 opened a new issue #11263:
URL: https://github.com/apache/druid/issues/11263
I have a docker-compose file as follows:
```yaml
zookeeper:
container_name: zookeeper
image: zookeeper:3.5
networks:
- nginx
environment:
- ZOO_MY_ID=1
coordinator:
image: apache/druid:0.20.2
container_name: coordinator
volumes:
- ./storage:/opt/data
- ./coordinator_var:/opt/druid/var
depends_on:
- zookeeper
- postgres
ports:
- "8081:8081"
command:
- coordinator
networks:
- nginx
env_file:
- environment
broker:
image: apache/druid:0.20.2
container_name: broker
volumes:
- ./broker_var:/opt/druid/var
depends_on:
- zookeeper
- postgres
- coordinator
ports:
- "8082:8082"
command:
- broker
networks:
- nginx
env_file:
- environment
historical:
image: apache/druid:0.20.2
container_name: historical
volumes:
- ./storage:/opt/data
- ./historical_var:/opt/druid/var
depends_on:
- zookeeper
- postgres
- coordinator
ports:
- "8083:8083"
command:
- historical
networks:
- nginx
env_file:
- environment
middlemanager:
image: apache/druid:0.20.2
container_name: middlemanager
volumes:
- ./storage:/opt/data
- ./middle_var:/opt/druid/var
depends_on:
- zookeeper
- postgres
- coordinator
ports:
- "8091:8091"
command:
- middleManager
networks:
- nginx
env_file:
- environment
router:
image: apache/druid:0.20.2
container_name: router
volumes:
- ./router_var:/opt/druid/var
depends_on:
- zookeeper
- postgres
- coordinator
ports:
- "8888:8888"
command:
- router
networks:
- nginx
env_file:
- environment
```
and I connect the Kafka stream data as follows:
```python
import requests
# api push data to druid
druidURL = 'http://coordinator:8081/druid/indexer/v1/supervisor'
druid_schema_path = "kafka.json"
headers = {'content-type': 'application/json'}
with open(druid_schema_path, 'rb') as f:
response = requests.post(druidURL, headers=headers, data=f).json()
print(response)
```
and file kafka.json
```json
{
"type": "kafka",
"dataSchema": {
"dataSource": "requests",
"timestampSpec": {
"column": "timestamp"
},
"dimensionsSpec": {
"dimensions" : [
"created_at",
"client",
"status",
"url",
"user_agent",
"request_method",
"upstream_connect_time",
"upstream_header_time",
"upstream_response_time",
"request_time",
"size",
"user_id",
"app_id"
]
},
"granularitySpec": {
"type": "uniform",
"segmentGranularity": "HOUR",
"queryGranularity": "NONE"
}
},
"ioConfig": {
"topic": "requests",
"inputFormat": {
"type": "json"
},
"consumerProperties": {
"bootstrap.servers": "kafka:9092"
},
"taskCount": 1,
"replicas": 1,
"taskDuration": "PT1H"
},
"tuningConfig": {
"type": "kafka",
"maxRowsPerSegment": 5000000
}
}
```
and when I run the Python file, I get this error:
```text
Could not resolve type id 'kafka' as a subtype of
`org.apache.druid.indexing.overlord.supervisor.SupervisorSpec`
```
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]