#!/bin/bash
#
# Sync the Spark workbench tooling with the latest release of
# github/spark-template: download the newest release asset, install the
# Spark runtime binaries, refresh the spark-tools package, and reinstall
# the gh "spark" CLI extension. Release/tools version markers under
# $MARKER_DIR make reruns idempotent.
#
# Required environment:
#   TEMPLATE_PAT - GitHub token with read access to github/spark-template.

set -eo pipefail

# Fail fast with a clear message rather than an anonymous 401 from curl.
: "${TEMPLATE_PAT:?TEMPLATE_PAT must be set to a GitHub token}"

echo "Checking for updates to workbench-template from GitHub"

WORKSPACE_DIR="/workspaces/spark-template"

MARKER_DIR="/var/lib/spark/.versions"
RELEASE_MARKER_FILE="$MARKER_DIR/release"
TOOLS_MARKER_FILE="$MARKER_DIR/tools"

sudo mkdir -p "$MARKER_DIR"

# Fetch the latest release information
LATEST_RELEASE=$(curl -s -H "Authorization: token $TEMPLATE_PAT" https://api.github.com/repos/github/spark-template/releases/latest)

# Check if marker file exists and has the same release ID
RELEASE_ID=$(echo "$LATEST_RELEASE" | jq -r '.id')
if [ -f "$RELEASE_MARKER_FILE" ] && [ "$(cat "$RELEASE_MARKER_FILE")" == "$RELEASE_ID" ]; then
    echo "Already at the latest release. Skipping download."
    exit 0
fi

echo "New version found. Downloading latest release."

TEMP_DIR=$(mktemp -d)
# Remove the scratch dir on every exit path (success, error, or signal),
# not just on the happy path.
trap 'rm -rf "$TEMP_DIR"' EXIT
cd "$TEMP_DIR"

# Release assets require the octet-stream Accept header to get the binary
# rather than the asset's JSON metadata.
DOWNLOAD_URL=$(echo "$LATEST_RELEASE" | jq -r '.assets[0].url')
curl -L -o dist.zip -H "Authorization: token $TEMPLATE_PAT" -H "Accept: application/octet-stream" "$DOWNLOAD_URL"

unzip -o dist.zip
rm dist.zip

# Upgrade the Spark Runtime tools
sudo mv ./spark-sdk-dist/server.js /usr/local/bin/spark-server
sudo mv ./spark-sdk-dist/designer.js /usr/local/bin/spark-designer
sudo mv ./spark-sdk-dist/upload-to-remote.sh /usr/local/bin/upload-to-remote.sh
sudo mv ./spark-sdk-dist/deploy.sh /usr/local/bin/deploy.sh
sudo mv ./spark-sdk-dist/file-syncer.js /usr/local/bin/spark-file-syncer
sudo mv ./spark-sdk-dist/spark-agent.js /usr/local/bin/spark-agent
# proxy.js is installed in two places: copy first, then move the original.
sudo cp ./spark-sdk-dist/proxy.js /workspaces/proxy.js
sudo mv ./spark-sdk-dist/proxy.js /usr/local/bin/proxy.js

# Upgrade the Spark Tools package (skipped when the version marker matches).
if [ -f "$TOOLS_MARKER_FILE" ] && [ "$(cat "$TOOLS_MARKER_FILE")" == "$(cat ./spark-sdk-dist/spark-tools-version)" ]; then
    echo "Already at the latest tools version. Skipping extraction."
else
    tar -xzf ./spark-sdk-dist/spark-tools.tgz

    # ${WORKSPACE_DIR:?} aborts instead of running `rm -rf /packages/...`
    # if the variable were ever empty.
    sudo rm -rf "${WORKSPACE_DIR:?}/packages/spark-tools"
    mkdir -p "$WORKSPACE_DIR/packages/spark-tools"
    sudo mv ./package/* "$WORKSPACE_DIR/packages/spark-tools"
    sudo rmdir ./package

    cd "$WORKSPACE_DIR"
    npm i -f
    cd - > /dev/null

    sudo cp ./spark-sdk-dist/spark-tools-version "$TOOLS_MARKER_FILE"
fi

# Upgrade the GH CLI extension
sudo rm -rf /usr/local/bin/gh-spark-cli
sudo mv spark-sdk-dist/gh-spark-cli /usr/local/bin/
cd /usr/local/bin/gh-spark-cli
# The --force option on gh extension install isn't honored for local, so manually remove it first.
# It's not an issue if that fails though as that probably just means it was the first time running this.
gh extension remove spark-cli > /dev/null || true
gh extension install .
gh alias set spark spark-cli --clobber
cd - > /dev/null

# Update marker file with latest release ID
echo "$RELEASE_ID" | sudo tee "$RELEASE_MARKER_FILE" > /dev/null

echo "Tools installed successfully."