Commit 0da6fea

merge main
2 parents a390c23 + e4061f2 commit 0da6fea

27 files changed: +4739 additions, -4502 deletions

.devcontainer/Dockerfile

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-FROM pangeo/base-image:2024.06.02
+FROM pangeo/base-image:2024.06.24

.github/dependabot.yml

Lines changed: 1 addition & 0 deletions
@@ -1,3 +1,4 @@
+# Regularly update Docker tags and Actions steps
 version: 2
 updates:
   - package-ecosystem: "docker"

.github/workflows/main.yaml

Lines changed: 29 additions & 17 deletions
@@ -1,10 +1,10 @@
-name: CI
+name: Deploy Website to GitHub Pages

 on:
   push:
     branches: main
-  pull_request:
-    branches: main
+    paths-ignore:
+      - ".devcontainer/**"

 # Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
 permissions:
@@ -18,19 +18,19 @@ concurrency:
   cancel-in-progress: true

 jobs:
-  build-and-deploy:
+  build:
     runs-on: ubuntu-latest

     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Setup JupyterBook Cache
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         with:
           path: _build
           # NOTE: change key to "jupyterbook-DATE" to force rebuilding cache
-          key: jupyterbook-20230707
+          key: jupyterbook-20240517

       - name: Install Conda environment with Micromamba
         uses: mamba-org/setup-micromamba@v1
@@ -50,19 +50,31 @@ jobs:
         run: |
           if (test -a _build/html/reports/*log); then cat _build/html/reports/*log ; fi

-      - name: Save Build
+      - name: Save Build Folder
         if: always()
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: build
           path: _build/

-      - name: Publish to GitHub Pages
-        if: github.ref == 'refs/heads/main'
-        uses: peaceiris/actions-gh-pages@v3
+      - name: Upload Pages Artifact
+        uses: actions/upload-pages-artifact@v3
         with:
-          github_token: ${{ secrets.GITHUB_TOKEN }}
-          publish_dir: _build/html
-          publish_branch: gh-pages
-          cname: tutorial.xarray.dev
-          enable_jekyll: false
+          path: _build/html
+
+  # Publish Website to GitHub Pages if built successfully
+  deploy:
+    needs: build
+    if: github.ref == 'refs/heads/main'
+    runs-on: ubuntu-latest
+    environment:
+      name: github-pages
+      url: ${{ steps.deployment.outputs.page_url }}
+
+    steps:
+      - name: Setup Pages
+        uses: actions/configure-pages@v5
+
+      - name: Deploy to GitHub Pages
+        id: deployment
+        uses: actions/deploy-pages@v4
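
For reference, the rewritten workflow above follows GitHub's first-party Pages deployment pattern: the build job uploads the rendered site as a Pages artifact, and a separate deploy job releases that artifact to the github-pages environment. Below is a minimal sketch of that pattern, not the full workflow; the permissions values are an assumption (the hunk above only shows that a permissions block exists), and the build steps are elided.

# Minimal sketch of the build/deploy split adopted above (condensed, illustrative).
# Assumption: GITHUB_TOKEN permissions include pages/id-token write, which
# actions/deploy-pages requires; the diff truncates the permissions block.
name: Deploy Website to GitHub Pages

on:
  push:
    branches: main

permissions:
  contents: read
  pages: write # allow publishing to GitHub Pages
  id-token: write # allow OIDC verification of the deployment origin

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      # ... checkout, environment setup, and `jupyter-book build` go here,
      # producing the site in _build/html (see the full workflow above) ...
      - name: Upload Pages Artifact
        uses: actions/upload-pages-artifact@v3
        with:
          path: _build/html

  deploy:
    needs: build
    if: github.ref == 'refs/heads/main'
    runs-on: ubuntu-latest
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    steps:
      - name: Setup Pages
        uses: actions/configure-pages@v5
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v4

Gating only the deploy job on refs/heads/main keeps the privileged Pages deployment separate from the unprivileged build, which also still uploads a downloadable build artifact for debugging.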

.github/workflows/preview.yaml

Lines changed: 0 additions & 64 deletions
This file was deleted.

.github/workflows/pull_request.yaml

Lines changed: 55 additions & 0 deletions
@@ -0,0 +1,55 @@
+name: Pull Request Build
+
+on:
+  pull_request:
+    types: [opened, synchronize, reopened, closed]
+    paths-ignore:
+      - ".devcontainer/**"
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  preview:
+    runs-on: ubuntu-latest
+    defaults:
+      run:
+        shell: bash -el {0}
+    steps:
+      - name: Checkout repository
+        if: github.event.action != 'closed'
+        uses: actions/checkout@v4
+
+      - name: Setup JupyterBook Cache
+        if: github.event.action != 'closed'
+        uses: actions/cache@v4
+        with:
+          path: _build
+          # NOTE: change key to "jupyterbook-DATE" to force rebuilding cache
+          key: jupyterbook-20240517
+
+      - name: Install Conda environment with Micromamba
+        if: github.event.action != 'closed'
+        uses: mamba-org/setup-micromamba@v1
+        with:
+          environment-file: conda/conda-lock.yml
+          environment-name: xarray-tutorial
+          cache-environment: true
+
+      - name: Build JupyterBook
+        if: github.event.action != 'closed'
+        run: |
+          jupyter-book build ./ --warningiserror --keep-going
+
+      - name: Dump Build Logs
+        if: github.event.action != 'closed'
+        run: |
+          if (test -a _build/html/reports/*log); then cat _build/html/reports/*log ; fi
+
+      - name: Upload artifact
+        if: github.event.action != 'closed'
+        uses: actions/upload-artifact@v4
+        with:
+          name: html
+          path: _build/html

.github/workflows/qaqc.yaml

Lines changed: 3 additions & 1 deletion
@@ -4,6 +4,8 @@ on:
   pull_request:
     branches:
       - main
+    paths-ignore:
+      - ".devcontainer/**"

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
@@ -17,7 +19,7 @@ jobs:
         shell: bash -el {0}

     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

       - name: Install Conda environment with Micromamba
         uses: mamba-org/setup-micromamba@v1

.github/workflows/surge_preview.yml

Lines changed: 42 additions & 0 deletions
@@ -0,0 +1,42 @@
+name: Pull Request Preview
+
+on:
+  workflow_run:
+    workflows: ["Pull Request Build"]
+    types:
+      - completed
+
+permissions:
+  pull-requests: write # allow surge-preview to create/update PR comments
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.workflow_run.id }}
+  cancel-in-progress: true
+
+jobs:
+  # NOTE: match job name in pull_request.yaml
+  preview:
+    runs-on: ubuntu-latest
+    if: ${{ github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success' }}
+
+    steps:
+      # Ensure folder exists for PR 'closed' case
+      - run: mkdir html
+
+      # Download built HTML from PR Build workflow
+      - uses: actions/download-artifact@v4
+        continue-on-error: true
+        with:
+          github-token: ${{ github.token }}
+          run-id: ${{ github.event.workflow_run.id }}
+
+      - name: Manage Surge.sh Deployment
+        id: preview_step
+        uses: afc163/surge-preview@v1
+        with:
+          surge_token: ${{ secrets.SURGE_TOKEN }}
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          build: echo 'Uploading html/ folder contents to Surge.sh...'
+          dist: html # NOTE: match upload_artifact name in pull_request.yaml
+          failOnError: true
+          teardown: true
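
A note on the split between the two new workflows: the preview is presumably moved to a workflow_run trigger so the Surge deployment can run with repository secrets (SURGE_TOKEN) and pull-requests: write, which pull_request runs from forks do not receive, while pull_request.yaml only builds the site and uploads it as an artifact named html. The handshake between the two is the artifact and the triggering run id; a condensed sketch assembled from the files above:

# In pull_request.yaml (unprivileged): publish the built site as an artifact.
- uses: actions/upload-artifact@v4
  with:
    name: html
    path: _build/html

# In surge_preview.yml (privileged, workflow_run): fetch artifacts from that run.
# With no `name:` given, download-artifact@v4 downloads every artifact of the
# triggering run into a folder per artifact, so html/ matches `dist: html`.
- uses: actions/download-artifact@v4
  with:
    github-token: ${{ github.token }}
    run-id: ${{ github.event.workflow_run.id }}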

.prettierignore

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-conda/**
+conda/

_config.yml

Lines changed: 0 additions & 1 deletion
@@ -68,7 +68,6 @@ sphinx:
     # maintain old paths and redirect them (so google results dont go to 404)
     # https://github.com/wpilibsuite/sphinxext-rediraffe
     - sphinxext.rediraffe
-    - sphinx_exercise
     - sphinx_codeautolink

   config:

advanced/apply_ufunc/automatic-vectorizing-numpy.ipynb

Lines changed: 6 additions & 7 deletions
@@ -63,18 +63,17 @@
 "    out[index, :] = np.interp(..., array[index, :], ...)\n",
 "```\n",
 "\n",
-"\n",
-"```{exercise}\n",
-":label: coreloopdims\n",
-"\n",
+"::::{admonition} Exercise\n",
+":class: tip\n",
 "Consider the example problem of interpolating a 2D array with dimensions `space` and `time` along the `time` dimension.\n",
 "Which dimension is the core dimension, and which is the \"loop dimension\"?\n",
-"```\n",
-"```{solution} coreloopdims\n",
+"\n",
+":::{admonition} Solution\n",
 ":class: dropdown\n",
 "\n",
 "`time` is the core dimension, and `space` is the loop dimension.\n",
-"```\n",
+":::\n",
+"::::\n",
 "\n",
 "## Vectorization\n",
 "\n",

advanced/apply_ufunc/complex-output-numpy.ipynb

Lines changed: 12 additions & 11 deletions
@@ -138,19 +138,20 @@
 "tags": []
 },
 "source": [
-"```{exercise}\n",
-":label: newdim\n",
+"::::{admonition} Exercise\n",
+":class: tip\n",
 "\n",
 "Apply the following function using `apply_ufunc`. It adds a new dimension to the input array, let's call it `newdim`. Specify the new dimension using `output_core_dims`. Do you need any `input_core_dims`?\n",
 "\n",
 "```python\n",
 "def add_new_dim(array):\n",
 "    return np.expand_dims(array, axis=-1)\n",
 "```\n",
-"````{solution} newdim\n",
+"\n",
+":::{admonition} Solution\n",
 ":class: dropdown\n",
 "\n",
-"``` python\n",
+"```python\n",
 "def add_new_dim(array):\n",
 "    return np.expand_dims(array, axis=-1)\n",
 "\n",
@@ -161,7 +162,8 @@
 "    output_core_dims=[[\"newdim\"]],\n",
 ")\n",
 "```\n",
-"````"
+":::\n",
+"::::"
 ]
 },
 {
@@ -327,8 +329,8 @@
 "tags": []
 },
 "source": [
-"````{exercise}\n",
-":label: generalize\n",
+"::::{admonition} Exercise\n",
+":class: tip\n",
 "\n",
 "We presented the concept of \"core dimensions\" as the \"smallest unit of data the function could handle.\" Do you understand how the above use of `apply_ufunc` generalizes to an array with more than one dimension? \n",
 "\n",
@@ -337,9 +339,8 @@
 "air3d = xr.tutorial.load_dataset(\"air_temperature\").air)\n",
 "``` \n",
 "Your goal is to have a minimum and maximum value of temperature across all latitudes for a given time and longitude.\n",
-"````\n",
 "\n",
-"````{solution} generalize\n",
+":::{admonition} Solution\n",
 ":class: dropdown\n",
 "\n",
 "We want to use `minmax` to compute the minimum and maximum along the \"lat\" dimension always, regardless of how many dimensions are on the input. So we specify `input_core_dims=[[\"lat\"]]`. The output does not contain the \"lat\" dimension, but we expect two returned variables. So we pass an empty list `[]` for each returned array, so `output_core_dims=[[], []]` just as before.\n",
@@ -352,8 +353,8 @@
 "    input_core_dims=[[\"lat\"]],\n",
 "    output_core_dims=[[],[]],\n",
 ")\n",
-"```\n",
-"````"
+":::\n",
+"::::"
 ]
 }
 ],

0 commit comments
