
Commit 0a25208

"spark_run.sh exec" now works, add docker commands for "spark_run.sh exec" and "spark_run.sh driver", upload s3mdbseq_keys.txt from a standard path
1 parent: 010cda9

File tree

entrypoint.sh
scripts/S3_FSCK/upload_metadata_backup_keys_to_s3.sh
spark_run.sh

3 files changed: 24 additions (+), 17 deletions (-)

entrypoint.sh

Lines changed: 1 addition & 1 deletion
@@ -32,6 +32,6 @@ in
         ;;
     exec)
         shift
-        $*
+        "$@"
         ;;
 esac
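This change is what makes "spark_run.sh exec" work when a forwarded argument contains whitespace: an unquoted $* re-splits every argument on spaces before running it, while "$@" expands to the original arguments verbatim. A minimal illustration, not taken from the repo:

    # Simulate three positional parameters, one of which contains a space.
    set -- echo --name "my app"
    printf '[%s]' "$@"; echo    # [echo][--name][my app]  - three words, as passed
    printf '[%s]' $*;   echo    # [echo][--name][my][app] - re-split into four words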

scripts/S3_FSCK/upload_metadata_backup_keys_to_s3.sh

Lines changed: 1 addition & 8 deletions
@@ -28,13 +28,6 @@ if [ ! -f "${CONFIG_FILE}" ];then
     exit 1
 fi
 
-export BUCKETD_HOSTPORT="$(shyaml get-value bucketd.url < ${CONFIG_FILE} | awk -F / '{print $NF}')"
-if [ -z "${BUCKETD_HOSTPORT}" ];then
-
-    echo "Provide bucketd URL in ${CONFIG_FILE}."
-    exit 1
-fi
-
 S3_ENDPOINT="$(shyaml get-value s3.endpoint < ${CONFIG_FILE})"
 export AWS_ACCESS_KEY_ID="$(shyaml get-value s3.access_key < ${CONFIG_FILE})"
 export AWS_SECRET_ACCESS_KEY="$(shyaml get-value s3.secret_key < ${CONFIG_FILE})"

@@ -44,7 +37,7 @@ RING="$(shyaml get-value ring < ${CONFIG_FILE})"
 # We use an exotic path for WORKDIR to make sure the container
 # was started with the documented command line.
 
-WORKDIR=/mnt/var_tmp
+WORKDIR=/opt/spark/tmp
 
 if [ ! -d "${WORKDIR}" ];then
     echo "ERROR ${WORKDIR} must be mounted by docker/ctr."

spark_run.sh

Lines changed: 22 additions & 8 deletions
@@ -71,20 +71,34 @@ in
     shift
     case $container_command
     in
+        docker)
+            $container_command run --rm --net=host --name=EXEC \
+                -v "${appsdir}:/opt/spark/apps:rw" \
+                -v "${datadir}:/opt/spark/tmp:rw" \
+                -v "${logsdir}:/opt/spark/spark-events:rw" \
+                /opt/spark/entrypoint.sh exec "$@"
+            ;;
         ctr)
-            ctr run --net-host --rm --mount="type=bind,src=${appsdir},dst=/opt/spark/apps,options=rbind:rw" \
+            $container_command run --net-host --rm --mount="type=bind,src=${appsdir},dst=/opt/spark/apps,options=rbind:rw" \
                 --mount="type=bind,src=${datadir},dst=/opt/spark/tmp,options=rbind:rw" \
-                --mount="type=bind,src=${logdir},dst=/opt/spark/spark-events,options=rbind:rw" \
-                "${IMAGE_NAME}:${VERSION}" EXEC ./entrypoint.sh exec $*
+                --mount="type=bind,src=${logsdir},dst=/opt/spark/spark-events,options=rbind:rw" \
+                "${IMAGE_NAME}:${VERSION}" EXEC /opt/spark/entrypoint.sh exec "$@"
     esac
     ;;
     driver)
     case $container_command
     in
+        docker)
+            $container_command run --rm --net=host --name=SPARK-DRIVER \
+                -v "${appsdir}:/opt/spark/apps:rw" \
+                -v "${datadir}:/opt/spark/tmp:rw" \
+                -v "${logsdir}:/opt/spark/spark-events:rw" \
+                /opt/spark/entrypoint.sh driver
+            ;;
         ctr)
-            ctr run --net-host --rm -cwd /opt/spark/apps --mount="type=bind,src=${appsdir},dst=/opt/spark/apps,options=rbind:rw" \
+            $container_command run --net-host --rm -cwd /opt/spark/apps --mount="type=bind,src=${appsdir},dst=/opt/spark/apps,options=rbind:rw" \
                 --mount="type=bind,src=${datadir},dst=/opt/spark/tmp,options=rbind:rw" \
-                --mount="type=bind,src=${logdir},dst=/opt/spark/spark-events,options=rbind:rw" -t \
+                --mount="type=bind,src=${logsdir},dst=/opt/spark/spark-events,options=rbind:rw" -t \
                 "${IMAGE_NAME}:${VERSION}" SPARK-DRIVER /opt/spark/entrypoint.sh driver 2> /dev/null
     esac
     ;;

@@ -108,7 +122,7 @@ in
         $container_command run --rm -dit --net=host --name spark-master \
             --hostname=spark-master \
             --add-host=spark-master:$master \
-            --volume "${logdir}:/opt/spark/spark-events:rw" \
+            --volume "${logsdir}:/opt/spark/spark-events:rw" \
             --volume "${datadir}:/opt/spark/tmp:rw" \
             "$host_storage" \
             ${spark_image_full} \

@@ -121,7 +135,7 @@ in
             --hostname=spark-worker \
             --add-host=spark-master:$master \
             --add-host=spark-worker:"$local_worker" \
-            --volume "${logdir}:/opt/spark/spark-events:rw" \
+            --volume "${logsdir}:/opt/spark/spark-events:rw" \
             --volume "${datadir}:/opt/spark/tmp:rw" \
             "$host_storage" \
             ${spark_image_full} \

@@ -162,7 +176,7 @@ in
         # start
         $container_command run -d --net-host \
             --env='SPARK_NO_DAEMONIZE=true' \
-            --mount='type=bind,src='${logdir}',dst=/opt/spark/spark-events,options=rbind:rw' \
+            --mount='type=bind,src='${logsdir}',dst=/opt/spark/spark-events,options=rbind:rw' \
             --mount='type=bind,src='${datadir}',dst=/opt/spark/tmp,options=rbind:rw' \
             ${spark_image_full} spark-master \
             ./entrypoint.sh master 2> /dev/null
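The new docker branches and the existing ctr branches express the same three bind mounts, each in its runtime's syntax: docker's -v src:dst:rw corresponds to ctr's --mount=type=bind,src=...,dst=...,options=rbind:rw, and ctr additionally takes a container ID (EXEC, SPARK-DRIVER) between the image and the command. A side-by-side sketch; the host directory and image references are placeholders:

    # Placeholders: /srv/spark/apps (host dir) and both image references.
    appsdir=/srv/spark/apps
    docker run --rm -v "${appsdir}:/opt/spark/apps:rw" \
        my-spark:latest ls /opt/spark/apps
    ctr run --rm --mount="type=bind,src=${appsdir},dst=/opt/spark/apps,options=rbind:rw" \
        docker.io/library/my-spark:latest DEMO ls /opt/spark/apps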
