Skip to content

Commit

Permalink
Updated public release (ai2cm#5)
Browse files Browse the repository at this point in the history
* Delete existing files

* Copy full-model code from 028c085e

* Restore README.md from ace repo

* Update README.md to add future docs and fix example link

* Update full-model refs to ace/ACE

* Remove refs to internal compute systems from README

* Update version to 0.2.0

* Delete GPU CI test because it won't run on this repo

* Remove Documentation section from README

* Delete internal stuff from Makefile

* Remove internal data processing configs/scripts

* Also delete test_config.py
  • Loading branch information
oliverwm1 authored Jul 29, 2024
1 parent 50d49b8 commit 7178ce2
Show file tree
Hide file tree
Showing 227 changed files with 26,605 additions and 4,426 deletions.
37 changes: 37 additions & 0 deletions .github/workflows/docs.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
# Workflow: build the Sphinx docs on every push/PR to main, and publish
# them to GitHub Pages on pushes to main.
name: docs

on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
      # Cache the whole Python environment, keyed on the files that
      # determine what gets installed.
      - uses: actions/cache@v4
        with:
          path: ${{ env.pythonLocation }}
          key: ${{ env.pythonLocation }}-${{ hashFiles('fme/requirements.txt') }}-${{ hashFiles('fme/docs/requirements.txt') }}-${{ hashFiles('fme/constraints.txt') }}
      - name: Install dependencies
        run: |
          python -m pip install uv==0.2.5
          uv pip install --system -c constraints.txt -e fme[docs]
      - name: Build docs
        run: |
          cd fme/docs && make doctest html
      # Deploy only for pushes to main (guarded by the `if:` below),
      # never for pull requests.
      - name: Deploy to GitHub Pages
        uses: peaceiris/actions-gh-pages@v3
        if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }}
        with:
          publish_branch: gh-pages
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: fme/docs/_build/
          force_orphan: true
33 changes: 0 additions & 33 deletions .github/workflows/fme-unit-tests.yaml

This file was deleted.

29 changes: 29 additions & 0 deletions .github/workflows/tests.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
# Workflow: run the CPU test suite on every push/PR to main.
name: tests

on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main

jobs:
  cpu:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
      # Cache the whole Python environment, keyed on the files that
      # determine what gets installed.
      - uses: actions/cache@v4
        with:
          path: ${{ env.pythonLocation }}
          key: ${{ env.pythonLocation }}-${{ hashFiles('fme/requirements.txt') }}-${{ hashFiles('fme/dev-requirements.txt') }}-${{ hashFiles('fme/constraints.txt') }}
      - name: Install dependencies
        run: |
          python -m pip install uv==0.2.5
          uv pip install --system -c constraints.txt -e fme[dev]
      - name: Run pytest
        run: |
          make test
7 changes: 7 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
fme/docs/available_modules.rst
fme/docs/_build

.vscode

# Byte-compiled / optimized / DLL files
Expand Down Expand Up @@ -72,6 +75,7 @@ instance/

# Sphinx documentation
docs/_build/
.DS_Store

# PyBuilder
target/
Expand Down Expand Up @@ -129,3 +133,6 @@ dmypy.json

# Pyre type checker
.pyre/

# scratch directory for testing
scratch/
7 changes: 1 addition & 6 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,8 +1,3 @@
exclude: |
(?x)^(
models/fcn-mip/.+ |
models/FourCastNet/.+
)$
repos:
- repo: https://github.com/psf/black
rev: 23.3.0
Expand All @@ -24,7 +19,7 @@ repos:
name: flake8 __init__.py files
files: "__init__.py"
# ignore unused import error in __init__.py files
args: ["--ignore=F401,E203", --config, setup.cfg]
args: ["--ignore=F401,E203,W503", --config, setup.cfg]
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.2.0
hooks:
Expand Down
29 changes: 5 additions & 24 deletions Makefile
Original file line number Diff line number Diff line change
@@ -1,37 +1,18 @@
VERSION ?= $(shell git rev-parse --short HEAD)
IMAGE ?= fme
ENVIRONMENT_NAME ?= fme
USERNAME ?= $(shell beaker account whoami --format=json | jq -r '.[0].name')

build_docker_image:
docker build -f docker/Dockerfile -t $(IMAGE):$(VERSION) .

build_beaker_image: build_docker_image
beaker image create --name $(IMAGE)-$(VERSION) $(IMAGE):$(VERSION)

build_podman_image:
podman-hpc build -f docker/Dockerfile -t $(IMAGE):$(VERSION) .

migrate_podman_image: build_podman_image
podman-hpc migrate $(IMAGE):$(VERSION)

enter_docker_image: build_docker_image
docker run -it --rm $(IMAGE):$(VERSION) bash

launch_beaker_session:
./launch-beaker-session.sh $(USERNAME)/$(IMAGE)-$(VERSION)

install_local_packages:
./install_local_packages.sh

install_dependencies:
./install_dependencies.sh

# recommended to deactivate current conda environment before running this
create_environment:
conda create -n $(ENVIRONMENT_NAME) python=3.8 pip
conda run -n $(ENVIRONMENT_NAME) ./install_dependencies.sh
conda run -n $(ENVIRONMENT_NAME) ./install_local_packages.sh
conda create -n $(ENVIRONMENT_NAME) python=3.10 pip
conda run --no-capture-output -n $(ENVIRONMENT_NAME) python -m pip install uv==0.2.5
conda run --no-capture-output -n $(ENVIRONMENT_NAME) uv pip install -c constraints.txt -e fme[dev]

test_fme_unit_tests:
pytest -m "not requires_gpu" --durations 10 fme/
test:
pytest --durations 20 .
22 changes: 2 additions & 20 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,9 +1,6 @@
# ACE: AI2 Climate Emulator
This repo contains the inference code accompanying "ACE: A fast, skillful learned global atmospheric model for climate prediction" ([arxiv:2310.02074](https://arxiv.org/abs/2310.02074)).

## DISCLAIMER
This is rapidly changing research software. We make no guarantees of maintaining backwards compatibility.

## Quickstart

### 1. Install
Expand Down Expand Up @@ -35,27 +32,12 @@ enough data to span the desired prediction period. The checkpoint is also availa
`gs://ai2cm-public-requester-pays/2023-11-29-ai2-climate-emulator-v1/checkpoints/ace_ckpt.tar`.

### 3. Update configuration and run
Update the paths in the [example config](examples/config-inference.yaml). Then in the
Update the paths in the [example config](fme/docs/inference-config.yaml). Then in the
`fme` conda environment, run inference with:
```
python -m fme.fcn_training.inference.inference examples/config-inference.yaml
python -m fme.ace.inference fme/docs/inference-config.yaml
```

## Configuration options
See the `InferenceConfig` class in [this file](fme/fme/fcn_training/inference/inference.py) for
description of configuration options. The [example config](examples/config-inference.yaml)
shows some useful defaults for performing a 400-step simulation (100 days, with the 6-hour time step).

## Performance
While inference can be performed without a GPU, it may be very slow in that case. In addition,
I/O performance is critical for fast inference due to loading of forcing data and target data
during inference.

## Analyzing output
Various climate performance metrics are computed online by the inference code. These can be viewed via
[wandb](https://wandb.ai) by setting `logging.log_to_wandb` to true and updating `logging.entity`
to your wandb entity. Additionally, raw output data is saved to netCDF by the inference code.

## Available datasets
Two versions of the dataset described in [arxiv:2310.02074](https://arxiv.org/abs/2310.02074)
are available:
Expand Down
1 change: 1 addition & 0 deletions constraints.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
torch==2.1.2 # minor version matches torch in Docker image
16 changes: 7 additions & 9 deletions docker/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,23 +1,21 @@
FROM nvcr.io/nvidia/pytorch:22.08-py3
FROM nvcr.io/nvidia/pytorch:23.08-py3

ENV FME_DIR=/full-model
ENV DGLBACKEND=pytorch

# Install gcloud
# Install gcloud- used for monthly netcdf data processing script
# https://cloud.google.com/sdk/docs/install#deb
RUN echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] http://packages.cloud.google.com/apt cloud-sdk main" | \
tee -a /etc/apt/sources.list.d/google-cloud-sdk.list && curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | \
apt-key --keyring /usr/share/keyrings/cloud.google.gpg add - && apt-get update -y && apt-get install google-cloud-cli -y

# install python deps
COPY requirements_except_torch.txt /tmp/requirements.txt
COPY requirements_no_deps.txt /tmp/requirements_no_deps.txt
COPY fme/requirements.txt /tmp/requirements.txt
RUN python3 -m pip install -r /tmp/requirements.txt
RUN python3 -m pip install --no-deps -r /tmp/requirements_no_deps.txt

# copy local code
# copy local code and install
COPY fme ${FME_DIR}/fme
COPY install_local_packages.sh ${FME_DIR}/install_local_packages.sh
RUN cd $FME_DIR && pip install --no-deps -e fme

# install packages in full-model repo
RUN cd $FME_DIR && ./install_local_packages.sh
# copy after install so editing scripts does not trigger reinstall
COPY scripts ${FME_DIR}/scripts
17 changes: 0 additions & 17 deletions examples/config-inference.yaml

This file was deleted.

20 changes: 20 additions & 0 deletions fme/docs/Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
# Minimal makefile for Sphinx documentation
#

# You can set these variables from the command line, e.g.
#   make html SPHINXOPTS="-W"
# Simple (:=) assignment: the values are literals, expanded once.
SPHINXOPTS  :=
SPHINXBUILD := python -msphinx
SPHINXPROJ  := fme
SOURCEDIR   := .
BUILDDIR    := _build

# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

# "Makefile" is listed so the catch-all rule below does not try to
# remake this file itself.
.PHONY: help Makefile

# Catch-all target: route all unknown targets (html, doctest, ...) to
# Sphinx using the new "make mode" option. $(O) is meant as a shortcut
# for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
17 changes: 17 additions & 0 deletions fme/docs/api.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
.. _API Reference:

=============
API Reference
=============

fme
===

.. automodule:: fme
:members:

fme.ace
=======

.. automodule:: fme.ace
:members:
Loading

0 comments on commit 7178ce2

Please sign in to comment.