Skip to content

Commit

Permalink
Checking stopped docker
Browse files — browse the repository at this point in the history
  • Loading branch information
ilias1111 committed Aug 8, 2024
1 parent 59b1eb6 commit a11584d
Show file tree
Hide file tree
Showing 2 changed files with 9 additions and 5 deletions.
6 changes: 4 additions & 2 deletions .github/workflows/pr_tests_spark.yml
Original file line number Diff line number Diff line change
Expand Up @@ -74,11 +74,13 @@ jobs:
run: |
docker-compose up -d
echo "Waiting for Spark services to start..."
sleep 120
sleep 30 # Reduced sleep time
docker-compose ps
docker-compose logs
- name: Check running containers
working-directory: .github/workflows/spark_deployment
run: docker ps
run: docker ps -a # Show all containers, including stopped ones

- name: Print Docker logs
if: failure()
Expand Down
8 changes: 5 additions & 3 deletions .github/workflows/spark_deployment/docker-compose.yml
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
version: '3'

networks:
spark-network:
driver: bridge
Expand All @@ -6,7 +8,7 @@ services:
spark-master:
image: snowplow/spark-s3-iceberg:v2
platform: ${DOCKER_PLATFORM:-linux/amd64}
command: ["/bin/bash", "-c", "/opt/spark/sbin/start-master.sh -h spark-master --properties-file /opt/spark/conf/spark-defaults.conf && tail -f /opt/spark/logs/spark--org.apache.spark.deploy.master.Master-1-*.out"]
command: ["/bin/bash", "-c", "/opt/spark/sbin/start-master.sh -h spark-master --properties-file /opt/spark/conf/spark-defaults.conf && tail -f /dev/null"]
hostname: spark-master
ports:
- '8080:8080'
Expand All @@ -28,7 +30,7 @@ services:
spark-worker:
image: snowplow/spark-s3-iceberg:v2
platform: ${DOCKER_PLATFORM:-linux/amd64}
command: ["/bin/bash", "-c", "sleep 10 && /opt/spark/sbin/start-worker.sh spark://spark-master:7077 --properties-file /opt/spark/conf/spark-defaults.conf && tail -f /opt/spark/logs/spark--org.apache.spark.deploy.worker.Worker-*.out"]
command: ["/bin/bash", "-c", "sleep 10 && /opt/spark/sbin/start-worker.sh spark://spark-master:7077 --properties-file /opt/spark/conf/spark-defaults.conf && tail -f /dev/null"]
depends_on:
- spark-master
environment:
Expand All @@ -49,7 +51,7 @@ services:
thrift-server:
image: snowplow/spark-s3-iceberg:v2
platform: ${DOCKER_PLATFORM:-linux/amd64}
command: ["/bin/bash", "-c", "sleep 30 && /opt/spark/sbin/start-thriftserver.sh --master spark://spark-master:7077 --driver-memory 2g --executor-memory 3g --hiveconf hive.server2.thrift.port=10000 --hiveconf hive.server2.thrift.bind.host=0.0.0.0 --conf spark.sql.hive.thriftServer.async=true --conf spark.sql.hive.thriftServer.workerQueue.size=2000 --conf spark.sql.hive.thriftServer.maxWorkerThreads=100 --conf spark.sql.hive.thriftServer.minWorkerThreads=50 && tail -f /opt/spark/logs/spark--org.apache.spark.sql.hive.thriftserver.HiveThriftServer2-*.out"]
command: ["/bin/bash", "-c", "sleep 30 && /opt/spark/sbin/start-thriftserver.sh --master spark://spark-master:7077 --driver-memory 2g --executor-memory 3g --hiveconf hive.server2.thrift.port=10000 --hiveconf hive.server2.thrift.bind.host=0.0.0.0 --conf spark.sql.hive.thriftServer.async=true --conf spark.sql.hive.thriftServer.workerQueue.size=2000 --conf spark.sql.hive.thriftServer.maxWorkerThreads=100 --conf spark.sql.hive.thriftServer.minWorkerThreads=50 && tail -f /dev/null"]
ports:
- '10000:10000'
depends_on:
Expand Down

0 comments on commit a11584d

Please sign in to comment.