
Commit 7c07b7f

Handle resuming and pulling new tools (#4)
This update introduces improvements to the dev container setup by restructuring the logic for tool installation and execution:

1. **Tool Installation and Updates:**
   - Moved the logic for installing tools, executables, and the CLI extension from the `onCreateCommand` into a dedicated `refreshTools` script.
   - The `refreshTools` script is now invoked during the `postStartCommand` (instead of the `onCreateCommand`), ensuring that tools are updated and initialized at both create and resume time.
   - `refreshTools` uses marker files so that tools are not re-pulled when the version hasn't changed (see the distilled sketch below).
2. **Tool Invocation:**
   - Tool startup moved from `devcontainer.json` into a new `postStartCommand` script.
   - This ensures that the tool refresh always finishes before the tools are started.
1 parent 7d2e021 commit 7c07b7f
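
The core of the update is the marker-file guard in `refreshTools.sh`. Here is a distilled sketch of that guard; all paths and commands are taken from the full diff below:

```bash
#!/bin/bash
# Sketch of the guard refreshTools.sh uses to avoid re-pulling tools.
MARKER_DIR="/var/lib/spark/.versions"
RELEASE_MARKER_FILE="$MARKER_DIR/release"

# Ask the GitHub API for the latest release and extract its numeric ID.
LATEST_RELEASE=$(curl -s -H "Authorization: token $TEMPLATE_PAT" \
    https://api.github.com/repos/github/spark-template/releases/latest)
RELEASE_ID=$(echo "$LATEST_RELEASE" | jq -r '.id')

# If the stored ID matches, the last refresh is still current: do nothing.
if [ -f "$RELEASE_MARKER_FILE" ] && [ "$(cat "$RELEASE_MARKER_FILE")" == "$RELEASE_ID" ]; then
    exit 0
fi

# ...download and install the release assets (see refreshTools.sh below)...

# Record what we installed so the next start can skip all of the above.
echo "$RELEASE_ID" | sudo tee "$RELEASE_MARKER_FILE" > /dev/null
```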

File tree

6 files changed, +720 -72 lines changed


.devcontainer/devcontainer.json

Lines changed: 24 additions & 25 deletions
```diff
@@ -1,28 +1,27 @@
 {
-    "name": "Workbench template",
-    "image": "mcr.microsoft.com/devcontainers/typescript-node:1-22-bookworm",
-    "hostRequirements": {
-        "storage": "32gb",
-        "memory": "8gb",
-        "cpus": 4
-    },
-    // Runs automatically after Prebuild Template environment is created.
-    "onCreateCommand": ".devcontainer/onCreate.sh",
-    // Runs automatically every time the dev container has been (re-)started
-    "postStartCommand": {
-        "app": "USE_DESIGNER=true USE_SPARK_AGENT=true npm run dev",
-        "ports": "gh cs ports visibility 5000:public -c $CODESPACE_NAME",
-        "spark-proxy": "proxy.js",
-        "server": "spark-server",
-        "spark-agent": "spark-agent",
-        "spark-designer": "spark-designer",
-        "spark-file-syncer": "spark-file-syncer 13000 >> /tmp/.spark-file-syncer.log 2>&1"
-    },
-    "forwardPorts": [4000, 5000, 9000, 13000],
-    "features": {
-        "ghcr.io/devcontainers/features/sshd:1": {
-            "version": "latest"
-        }
+  "name": "Workbench template",
+  "image": "mcr.microsoft.com/devcontainers/typescript-node:1-22-bookworm",
+  "hostRequirements": {
+    "storage": "32gb",
+    "memory": "8gb",
+    "cpus": 4
+  },
+  // Runs automatically after Prebuild Template environment is created.
+  "onCreateCommand": ".devcontainer/onCreate.sh",
+  // Runs automatically every time the dev container has been (re-)started
+  "postStartCommand": {
+    "tools": ".devcontainer/postStartCommand.sh",
+    "welcome-mat": "echo Welcome to Spark!"
+  },
+  "forwardPorts": [
+    4000,
+    5000,
+    9000,
+    13000
+  ],
+  "features": {
+    "ghcr.io/devcontainers/features/sshd:1": {
+      "version": "latest"
     }
   }
-
+}
```
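
A note on why the startup entries left `devcontainer.json`: per the dev container lifecycle semantics, object-valued commands run their entries in parallel, so under the old layout nothing guaranteed the tools were installed before they were launched. The new `postStartCommand.sh` (shown later in this commit) makes the ordering explicit. A minimal sketch of that sequencing, using tool names from the script:

```bash
#!/bin/bash
# Minimal sketch of the sequencing the new layout relies on: the refresh
# runs synchronously, so every tool started afterwards is already current.
.devcontainer/refreshTools.sh    # blocks until tools are up to date
spark-server &                   # then start the long-running tools
spark-agent &
wait                             # keep the lifecycle command attached
```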

.devcontainer/onCreate.sh

Lines changed: 4 additions & 30 deletions
```diff
@@ -21,34 +21,8 @@ azcopy_dir=$(find /usr/local/bin/ -type d -name "azcopy*" | head -n 1)
 sudo mv "$azcopy_dir/azcopy" /usr/local/bin/azcopy
 sudo rm -rf "$azcopy_dir"
 
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+"$SCRIPT_DIR/refreshTools.sh"
 
-LATEST_RELEASE=$(curl -s -H "Authorization: token $TEMPLATE_PAT" https://api.github.com/repos/github/spark-template/releases/latest)
-DOWNLOAD_URL=$(echo "$LATEST_RELEASE" | jq -r '.assets[0].url')
-curl -L -o dist.zip -H "Authorization: token $TEMPLATE_PAT" -H "Accept: application/octet-stream" "$DOWNLOAD_URL"
-unzip -o dist.zip
-rm dist.zip
-
-sudo mv ./spark-sdk-dist/server.js /usr/local/bin/spark-server
-sudo mv ./spark-sdk-dist/designer.js /usr/local/bin/spark-designer
-sudo mv ./spark-sdk-dist/upload-to-remote.sh /usr/local/bin/upload-to-remote.sh
-sudo mv ./spark-sdk-dist/deploy.sh /usr/local/bin/deploy.sh
-sudo mv ./spark-sdk-dist/file-syncer.js /usr/local/bin/spark-file-syncer
-sudo mv ./spark-sdk-dist/spark-agent.js /usr/local/bin/spark-agent
-sudo cp ./spark-sdk-dist/proxy.js /workspaces/proxy.js
-sudo mv ./spark-sdk-dist/proxy.js /usr/local/bin/proxy.js
-
-tar -xzf ./spark-sdk-dist/spark-tools.tgz
-
-mkdir -p /workspaces/spark-template/packages/spark-tools
-sudo mv ./package/* /workspaces/spark-template/packages/spark-tools
-sudo rmdir ./package
-
-sudo mv spark-sdk-dist/gh-spark-cli /usr/local/bin/
-cd /usr/local/bin/gh-spark-cli
-gh extension install .
-gh alias set spark spark-cli
-
-rm -rf /workspaces/spark-template/spark-sdk-dist
-
-cd /workspaces/spark-template
-npm i -f
+echo "Pre-starting the server and generating the optimized assets"
+npm run optimize --override
```

.devcontainer/postStartCommand.sh

Lines changed: 15 additions & 0 deletions
```diff
@@ -0,0 +1,15 @@
+#!/bin/bash
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+"$SCRIPT_DIR/refreshTools.sh"
+
+USE_DESIGNER=true USE_SPARK_AGENT=true npm run dev &
+proxy.js &
+spark-server &
+spark-agent &
+spark-designer &
+spark-file-syncer 13000 >> /tmp/.spark-file-syncer.log 2>&1 &
+
+echo "Spark tools started successfully"
+
+wait
```
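
Everything in this script is backgrounded, so a quick way to confirm the tools actually came up after a resume (a hypothetical check, not part of this commit):

```bash
# List the Spark processes launched by postStartCommand.sh; the names are
# taken from the script above. No output means the tools did not start.
pgrep -af 'spark-server|spark-agent|spark-designer|spark-file-syncer|proxy.js'

# The file syncer writes to a fixed log path, so its startup is inspectable:
tail -n 20 /tmp/.spark-file-syncer.log
```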

.devcontainer/refreshTools.sh

Lines changed: 80 additions & 0 deletions
```diff
@@ -0,0 +1,80 @@
+#!/bin/bash
+
+set -e
+
+echo "Checking for updates to workbench-template from GitHub"
+
+WORKSPACE_DIR="/workspaces/spark-template"
+
+MARKER_DIR="/var/lib/spark/.versions"
+RELEASE_MARKER_FILE="$MARKER_DIR/release"
+TOOLS_MARKER_FILE="$MARKER_DIR/tools"
+
+sudo mkdir -p "$MARKER_DIR"
+
+# Fetch the latest release information
+LATEST_RELEASE=$(curl -s -H "Authorization: token $TEMPLATE_PAT" https://api.github.com/repos/github/spark-template/releases/latest)
+
+# Check if marker file exists and has the same release ID
+RELEASE_ID=$(echo "$LATEST_RELEASE" | jq -r '.id')
+if [ -f "$RELEASE_MARKER_FILE" ] && [ "$(cat "$RELEASE_MARKER_FILE")" == "$RELEASE_ID" ]; then
+    echo "Already at the latest release. Skipping download."
+    exit 0
+fi
+
+echo "New version found. Downloading latest release."
+
+TEMP_DIR=$(mktemp -d)
+cd $TEMP_DIR
+
+DOWNLOAD_URL=$(echo "$LATEST_RELEASE" | jq -r '.assets[0].url')
+curl -L -o dist.zip -H "Authorization: token $TEMPLATE_PAT" -H "Accept: application/octet-stream" "$DOWNLOAD_URL"
+
+unzip -o dist.zip
+rm dist.zip
+
+# Upgrade the Spark Runtime tools
+sudo mv ./spark-sdk-dist/server.js /usr/local/bin/spark-server
+sudo mv ./spark-sdk-dist/designer.js /usr/local/bin/spark-designer
+sudo mv ./spark-sdk-dist/upload-to-remote.sh /usr/local/bin/upload-to-remote.sh
+sudo mv ./spark-sdk-dist/deploy.sh /usr/local/bin/deploy.sh
+sudo mv ./spark-sdk-dist/file-syncer.js /usr/local/bin/spark-file-syncer
+sudo mv ./spark-sdk-dist/spark-agent.js /usr/local/bin/spark-agent
+sudo cp ./spark-sdk-dist/proxy.js /workspaces/proxy.js
+sudo mv ./spark-sdk-dist/proxy.js /usr/local/bin/proxy.js
+
+# Upgrade the Spark Tools package
+if [ -f "$TOOLS_MARKER_FILE" ] && [ "$(cat "$TOOLS_MARKER_FILE")" == "$(cat ./spark-sdk-dist/spark-tools-version)" ]; then
+    echo "Already at the latest tools version. Skipping extraction."
+else
+    tar -xzf ./spark-sdk-dist/spark-tools.tgz
+
+    sudo rm -rf $WORKSPACE_DIR/packages/spark-tools
+    mkdir -p $WORKSPACE_DIR/packages/spark-tools
+    sudo mv ./package/* $WORKSPACE_DIR/packages/spark-tools
+    sudo rmdir ./package
+
+    cd $WORKSPACE_DIR
+    npm i -f
+    cd - >/dev/null
+
+    sudo cp ./spark-sdk-dist/spark-tools-version "$TOOLS_MARKER_FILE"
+fi
+
+# Upgrade the GH CLI extension
+sudo rm -rf /usr/local/bin/gh-spark-cli
+sudo mv spark-sdk-dist/gh-spark-cli /usr/local/bin/
+cd /usr/local/bin/gh-spark-cli
+# The --force option on gh extension install isn't honored for local, so manually remove it first.
+# It's not an issue if that fails though as that probably just means it was the first time running this.
+gh extension remove spark-cli >/dev/null || true
+gh extension install .
+gh alias set spark spark-cli --clobber
+cd - >/dev/null
+
+rm -rf $TEMP_DIR
+
+# Update marker file with latest release ID
+echo "$RELEASE_ID" | sudo tee "$RELEASE_MARKER_FILE" > /dev/null
+
+echo "Tools installed successfully."
```
