
Commit 247adef

Merge branch 'main' into alexagillman-onstopcommand

2 parents: a54c6c6 + 7c07b7f

6 files changed: +725 -76 lines

.devcontainer/devcontainer.json (29 additions, 29 deletions)

@@ -1,32 +1,32 @@
 {
-  "name": "Workbench template",
-  "image": "mcr.microsoft.com/devcontainers/typescript-node:1-22-bookworm",
-  "hostRequirements": {
-    "storage": "32gb",
-    "memory": "8gb",
-    "cpus": 4
-  },
-  // Runs automatically after Prebuild Template environment is created.
-  "onCreateCommand": ".devcontainer/onCreate.sh",
-  // Runs automatically every time the dev container has been (re-)started
-  "postStartCommand": {
-    "app": "USE_DESIGNER=true USE_SPARK_AGENT=true npm run dev",
-    "ports": "gh cs ports visibility 5000:public -c $CODESPACE_NAME",
-    "spark-proxy": "proxy.js",
-    "server": "spark-server",
-    "spark-agent": "spark-agent",
-    "spark-designer": "spark-designer",
-    "spark-file-syncer": "spark-file-syncer 13000 >> /tmp/.spark-file-syncer.log 2>&1"
-  },
-  "customizations": {
-    "codespaces": {
-      "onStopCommand": "REVISION_NAME='spark-preview' /usr/local/bin/deploy.sh"
-    }
-  },
-  "forwardPorts": [4000, 5000, 9000, 13000],
-  "features": {
-    "ghcr.io/devcontainers/features/sshd:1": {
-      "version": "latest"
-    }
+  "name": "Workbench template",
+  "image": "mcr.microsoft.com/devcontainers/typescript-node:1-22-bookworm",
+  "hostRequirements": {
+    "storage": "32gb",
+    "memory": "8gb",
+    "cpus": 4
+  },
+  // Runs automatically after Prebuild Template environment is created.
+  "onCreateCommand": ".devcontainer/onCreate.sh",
+  // Runs automatically every time the dev container has been (re-)started
+  "postStartCommand": {
+    "tools": ".devcontainer/postStartCommand.sh",
+    "welcome-mat": "echo Welcome to Spark!"
+  },
+  "customizations": {
+    "codespaces": {
+      "onStopCommand": "REVISION_NAME='spark-preview' /usr/local/bin/deploy.sh"
+    }
+  },
+  "forwardPorts": [
+    4000,
+    5000,
+    9000,
+    13000
+  ],
+  "features": {
+    "ghcr.io/devcontainers/features/sshd:1": {
+      "version": "latest"
     }
   }
+}
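In devcontainer.json, every key under a postStartCommand object runs as its own parallel process on each container start. This change replaces the six inline service commands with a single entry point, .devcontainer/postStartCommand.sh (added below), and drops the inline `gh cs ports visibility` call; the Codespaces-only onStopCommand customization is unchanged. A hypothetical smoke test for the forwarded ports (the assumption that the started services bind them on localhost is mine, not stated in the diff):

```bash
#!/bin/bash
# Hypothetical check, not part of the commit: verify that something is
# listening on each port listed in "forwardPorts" after startup.
for port in 4000 5000 9000 13000; do
  if nc -z localhost "$port" 2>/dev/null; then
    echo "port $port: listening"
  else
    echo "port $port: nothing listening yet" >&2
  fi
done
```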

.devcontainer/onCreate.sh (4 additions, 30 deletions)

@@ -21,34 +21,8 @@ azcopy_dir=$(find /usr/local/bin/ -type d -name "azcopy*" | head -n 1)
 sudo mv "$azcopy_dir/azcopy" /usr/local/bin/azcopy
 sudo rm -rf "$azcopy_dir"
 
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+"$SCRIPT_DIR/refreshTools.sh"
 
-LATEST_RELEASE=$(curl -s -H "Authorization: token $TEMPLATE_PAT" https://api.github.com/repos/github/spark-template/releases/latest)
-DOWNLOAD_URL=$(echo "$LATEST_RELEASE" | jq -r '.assets[0].url')
-curl -L -o dist.zip -H "Authorization: token $TEMPLATE_PAT" -H "Accept: application/octet-stream" "$DOWNLOAD_URL"
-unzip -o dist.zip
-rm dist.zip
-
-sudo mv ./spark-sdk-dist/server.js /usr/local/bin/spark-server
-sudo mv ./spark-sdk-dist/designer.js /usr/local/bin/spark-designer
-sudo mv ./spark-sdk-dist/upload-to-remote.sh /usr/local/bin/upload-to-remote.sh
-sudo mv ./spark-sdk-dist/deploy.sh /usr/local/bin/deploy.sh
-sudo mv ./spark-sdk-dist/file-syncer.js /usr/local/bin/spark-file-syncer
-sudo mv ./spark-sdk-dist/spark-agent.js /usr/local/bin/spark-agent
-sudo cp ./spark-sdk-dist/proxy.js /workspaces/proxy.js
-sudo mv ./spark-sdk-dist/proxy.js /usr/local/bin/proxy.js
-
-tar -xzf ./spark-sdk-dist/spark-tools.tgz
-
-mkdir -p /workspaces/spark-template/packages/spark-tools
-sudo mv ./package/* /workspaces/spark-template/packages/spark-tools
-sudo rmdir ./package
-
-sudo mv spark-sdk-dist/gh-spark-cli /usr/local/bin/
-cd /usr/local/bin/gh-spark-cli
-gh extension install .
-gh alias set spark spark-cli
-
-rm -rf /workspaces/spark-template/spark-sdk-dist
-
-cd /workspaces/spark-template
-npm i -f
+echo "Pre-starting the server and generating the optimized assets"
+npm run optimize --override
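The two added lines replace thirty lines of download-and-install logic with a call to the new refreshTools.sh, shared with postStartCommand.sh below. The SCRIPT_DIR idiom resolves the directory onCreate.sh itself lives in, so the sibling script is found regardless of the hook's working directory. A minimal sketch of the idiom (the echo is illustrative only):

```bash
#!/bin/bash
# "${BASH_SOURCE[0]}" names this file even when the script is sourced,
# and the cd/pwd round trip turns a relative path into an absolute one.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
echo "Sibling scripts resolve as: $SCRIPT_DIR/refreshTools.sh"
```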

.devcontainer/postStartCommand.sh (15 additions, 0 deletions)

@@ -0,0 +1,15 @@
+#!/bin/bash
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+"$SCRIPT_DIR/refreshTools.sh"
+
+USE_DESIGNER=true USE_SPARK_AGENT=true npm run dev &
+proxy.js &
+spark-server &
+spark-agent &
+spark-designer &
+spark-file-syncer 13000 >> /tmp/.spark-file-syncer.log 2>&1 &
+
+echo "Spark tools started successfully"
+
+wait
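The script backgrounds each service with & and then blocks on wait, so the postStartCommand process stays alive for as long as any service runs. One hedged refinement, not in the commit: without a signal trap, stopping the container gives the children no orderly shutdown. A sketch under that assumption, with sleep standing in for the real services:

```bash
#!/bin/bash
# Sketch only: forward SIGTERM/SIGINT to the background jobs so a
# container stop tears them down cleanly. "jobs -p" lists their PIDs.
trap 'kill $(jobs -p) 2>/dev/null' SIGTERM SIGINT

sleep 300 &   # stand-in for spark-server, spark-agent, and friends
sleep 300 &

wait          # returns once every background job has exited
```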

.devcontainer/refreshTools.sh (80 additions, 0 deletions)

@@ -0,0 +1,80 @@
+#!/bin/bash
+
+set -e
+
+echo "Checking for updates to workbench-template from GitHub"
+
+WORKSPACE_DIR="/workspaces/spark-template"
+
+MARKER_DIR="/var/lib/spark/.versions"
+RELEASE_MARKER_FILE="$MARKER_DIR/release"
+TOOLS_MARKER_FILE="$MARKER_DIR/tools"
+
+sudo mkdir -p "$MARKER_DIR"
+
+# Fetch the latest release information
+LATEST_RELEASE=$(curl -s -H "Authorization: token $TEMPLATE_PAT" https://api.github.com/repos/github/spark-template/releases/latest)
+
+# Check if marker file exists and has the same release ID
+RELEASE_ID=$(echo "$LATEST_RELEASE" | jq -r '.id')
+if [ -f "$RELEASE_MARKER_FILE" ] && [ "$(cat "$RELEASE_MARKER_FILE")" == "$RELEASE_ID" ]; then
+    echo "Already at the latest release. Skipping download."
+    exit 0
+fi
+
+echo "New version found. Downloading latest release."
+
+TEMP_DIR=$(mktemp -d)
+cd $TEMP_DIR
+
+DOWNLOAD_URL=$(echo "$LATEST_RELEASE" | jq -r '.assets[0].url')
+curl -L -o dist.zip -H "Authorization: token $TEMPLATE_PAT" -H "Accept: application/octet-stream" "$DOWNLOAD_URL"
+
+unzip -o dist.zip
+rm dist.zip
+
+# Upgrade the Spark Runtime tools
+sudo mv ./spark-sdk-dist/server.js /usr/local/bin/spark-server
+sudo mv ./spark-sdk-dist/designer.js /usr/local/bin/spark-designer
+sudo mv ./spark-sdk-dist/upload-to-remote.sh /usr/local/bin/upload-to-remote.sh
+sudo mv ./spark-sdk-dist/deploy.sh /usr/local/bin/deploy.sh
+sudo mv ./spark-sdk-dist/file-syncer.js /usr/local/bin/spark-file-syncer
+sudo mv ./spark-sdk-dist/spark-agent.js /usr/local/bin/spark-agent
+sudo cp ./spark-sdk-dist/proxy.js /workspaces/proxy.js
+sudo mv ./spark-sdk-dist/proxy.js /usr/local/bin/proxy.js
+
+# Upgrade the Spark Tools package
+if [ -f "$TOOLS_MARKER_FILE" ] && [ "$(cat "$TOOLS_MARKER_FILE")" == "$(cat ./spark-sdk-dist/spark-tools-version)" ]; then
+    echo "Already at the latest tools version. Skipping extraction."
+else
+    tar -xzf ./spark-sdk-dist/spark-tools.tgz
+
+    sudo rm -rf $WORKSPACE_DIR/packages/spark-tools
+    mkdir -p $WORKSPACE_DIR/packages/spark-tools
+    sudo mv ./package/* $WORKSPACE_DIR/packages/spark-tools
+    sudo rmdir ./package
+
+    cd $WORKSPACE_DIR
+    npm i -f
+    cd - >/dev/null
+
+    sudo cp ./spark-sdk-dist/spark-tools-version "$TOOLS_MARKER_FILE"
+fi
+
+# Upgrade the GH CLI extension
+sudo rm -rf /usr/local/bin/gh-spark-cli
+sudo mv spark-sdk-dist/gh-spark-cli /usr/local/bin/
+cd /usr/local/bin/gh-spark-cli
+# The --force option on gh extension install isn't honored for local, so manually remove it first.
+# It's not an issue if that fails though as that probably just means it was the first time running this.
+gh extension remove spark-cli >/dev/null || true
+gh extension install .
+gh alias set spark spark-cli --clobber
+cd - >/dev/null
+
+rm -rf $TEMP_DIR
+
+# Update marker file with latest release ID
+echo "$RELEASE_ID" | sudo tee "$RELEASE_MARKER_FILE" > /dev/null
+
+echo "Tools installed successfully."
