# docker-compose-local.yml
version: '3.8'
services:
  product-service:
    image: mongo:6.0.3
    restart: always
    ports:
      - "27017:27017"
    environment:
      MONGO_INITDB_ROOT_USERNAME: test
      MONGO_INITDB_ROOT_PASSWORD: test
      MONGO_INITDB_DATABASE: product-service
  order-service:
    image: postgres:15.1
    restart: always
    ports:
      - "5432:5432"
    environment:
      POSTGRES_PASSWORD: test
      POSTGRES_USER: test
      POSTGRES_DB: order-service
  inventory-service:
    image: postgres:15.1
    restart: always
    ports:
      - "5433:5432"
    environment:
      POSTGRES_PASSWORD: test
      POSTGRES_USER: test
      POSTGRES_DB: inventory-service
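  # The application services are expected to run outside this compose file (it only
  # provides local infrastructure), so they reach these databases on the published
  # host ports. A sketch of the matching Spring Boot settings, assuming the standard
  # Spring property names (not taken from this repo):
  #   spring.data.mongodb.uri=mongodb://test:test@localhost:27017/product-service?authSource=admin
  #   spring.datasource.url=jdbc:postgresql://localhost:5432/order-service     # order-service
  #   spring.datasource.url=jdbc:postgresql://localhost:5433/inventory-service # inventory-service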
  keycloak:
    image: keycloak/keycloak:20.0.3
    command:
      - "start-dev"
      - "--http-relative-path=/auth"
      - "--import-realm"
    environment:
      KC_DB: dev-file
      KEYCLOAK_ADMIN: keycloak
      KEYCLOAK_ADMIN_PASSWORD: keycloak
      KC_FEATURES: scripts
    volumes:
      - type: bind
        source: ./keycloak
        target: /opt/keycloak/data/import
        read_only: true
    ports:
      - "8888:8080"
  zipkin:
    image: openzipkin/zipkin:2.24-arm64
    ports:
      - "9411:9411"
  zookeeper:
    image: confluentinc/cp-zookeeper:7.3.0
    container_name: zookeeper
    environment:
      ZOOKEEPER_CLIENT_PORT: 2181
      # Basic time unit (ms) ZooKeeper uses for heartbeats and session timeouts
      ZOOKEEPER_TICK_TIME: 2000
  broker:
    image: confluentinc/cp-kafka:7.3.0
    container_name: broker
    ports:
      # To learn about configuring Kafka for access across networks see
      # https://www.confluent.io/blog/kafka-client-cannot-connect-to-broker-on-aws-on-docker-etc/
      - "9092:9092"
    depends_on:
      - zookeeper
    environment:
      KAFKA_BROKER_ID: 1
      KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_INTERNAL:PLAINTEXT
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://localhost:9092,PLAINTEXT_INTERNAL://broker:29092
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
      KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
      KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
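  # The broker advertises two listeners: PLAINTEXT://localhost:9092 for clients running
  # on the host and PLAINTEXT_INTERNAL://broker:29092 for containers on the compose
  # network. A sketch of the matching Spring Kafka setting for services started on the
  # host (standard Spring Boot property, not taken from this repo):
  #   spring.kafka.bootstrap-servers=localhost:9092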
  prometheus:
    image: prom/prometheus:v2.41.0
    container_name: prometheus
    restart: always
    ports:
      - "9090:9090"
    volumes:
      - ./prometheus/prometheus.yml:/etc/prometheus/prometheus.yml
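  # A minimal sketch of what the mounted ./prometheus/prometheus.yml might contain,
  # assuming the services expose Micrometer metrics at /actuator/prometheus; the job
  # name, target, and port below are illustrative, not taken from this repo:
  #   scrape_configs:
  #     - job_name: 'order-service'
  #       metrics_path: '/actuator/prometheus'
  #       scrape_interval: 15s
  #       static_configs:
  #         - targets: ['host.docker.internal:8081']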
  grafana:
    image: grafana/grafana:9.3.4
    container_name: grafana
    restart: always
    ports:
      - "3000:3000"
    # links:
    #   - prometheus:prometheus
    volumes:
      - ./grafana:/var/lib/grafana
    environment:
      GF_SECURITY_ADMIN_USER: test
      GF_SECURITY_ADMIN_PASSWORD: test
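  # Grafana is available at http://localhost:3000 (user/password: test/test). A
  # Prometheus data source can point at http://prometheus:9090, since both containers
  # share the default compose network; dashboards and settings persist in the
  # bind-mounted ./grafana directory.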
  # elastic-search:
  #   container_name: elastic-search
  #   image: elasticsearch:8.6.0
  #   restart: always
  #   volumes:
  #     - elastic_data:/usr/share/elasticsearch/data/
  #   environment:
  #     ES_JAVA_OPTS: '-Xmx256m -Xms256m'
  #     discovery.type: single-node  # single-node cluster
  #     xpack.security.enrollment.enabled: true
  #   ports:
  #     - '9200:9200'
  #     - '9300:9300'
  #
  # logstash:
  #   container_name: logstash
  #   image: logstash:8.6.0
  #   restart: always
  #   volumes:
  #     - ./logstash/:/logstash_dir
  #   command: logstash -f /logstash_dir/logstash.conf
  #   depends_on:
  #     - elastic-search
  #   ports:
  #     - '9600:9600'
  #   environment:
  #     LS_JAVA_OPTS: '-Xmx256m -Xms256m'
  #
  # kibana:
  #   container_name: kibana
  #   image: kibana:8.6.0
  #   restart: always
  #   ports:
  #     - '5601:5601'
  #   environment:
  #     - ELASTICSEARCH_URL=http://elastic-search:9200
  #   depends_on:
  #     - elastic-search

#volumes:
#  elastic_data: {}