
Commit 855cc86

v1.0.0-rc1 preparation (#22)

* Add a workflow job to publish to TestPyPI
* Only build wheels when the core tests are passing
* Change the version number to match the Python version spec
* Configure build steps for the prereleased activity
* Move the Getting Started section higher in the README
* Remove the rcXX suffix from the CMake version, as CMake doesn't accept the Python versioning format
* Bump the cibuildwheel version

1 parent 2743bcc · commit 855cc86

File tree: 4 files changed, +87 −43 lines

* .github/workflows/test_and_deploy.yml
* CMakeLists.txt
* README.md
* version.txt


.github/workflows/test_and_deploy.yml (46 additions, 5 deletions)

```diff
@@ -5,7 +5,7 @@ on:
     branches: [main]
   pull_request:
   release:
-    types: [released]
+    types: [prereleased, released]
 jobs:
   test_repo:
     name: Test on ${{ matrix.os }} w/ Py${{ matrix.python-version }}
@@ -114,6 +114,7 @@ jobs:
           path: dist/
   build_wheels:
     name: Build wheels on Ubuntu
+    needs: test_repo
     runs-on: ubuntu-20.04 # Can be also run for macOS
     steps:
       - uses: actions/checkout@v3
@@ -125,17 +126,17 @@ jobs:
         with:
           platforms: all
       - name: Build wheels
-        if: github.event_name != 'release' && github.event.action != 'released'
-        uses: pypa/cibuildwheel@v2.15.0 # The main configuration is in pyproject.toml
+        if: github.event_name != 'release'
+        uses: pypa/cibuildwheel@v2.19.2 # The main configuration is in pyproject.toml
         env:
           CIBW_BUILD: "cp311-manylinux*" # Build only python 3.11 wheels for testing
           # Increase verbosity to see what's going on in the build in case of failure
           CIBW_BUILD_VERBOSITY: 3
           CIBW_REPAIR_WHEEL_COMMAND_LINUX: >
             auditwheel show {wheel} && auditwheel repair -w {dest_dir} {wheel}
       - name: Build release wheels
-        if: github.event_name == 'release' && github.event.action == 'released'
-        uses: pypa/cibuildwheel@v2.15.0 # The main configuration is in pyproject.toml
+        if: github.event_name == 'release' && (github.event.action == 'released' || github.event.action == 'prereleased')
+        uses: pypa/cibuildwheel@v2.19.2 # The main configuration is in pyproject.toml
         env:
           # Set NLE_RELEASE_BUILD to 1 to build release wheels
           CIBW_ENVIRONMENT: "NLE_RELEASE_BUILD=1"
@@ -175,6 +176,46 @@ jobs:
           python -m pytest --import-mode=append -svx $REPONAME/nle/tests
           popd
 
+  # Use prereleases to test publish the artefacts to testpypi
+  test_deploy:
+    name: Deploy artefacts to testpypi
+    needs: [test_sdist_3_11, test_manylinux_3_11]
+    if: github.event_name == 'release' && github.event.action == 'prereleased'
+    runs-on: ubuntu-latest
+    environment:
+      name: prerelease
+      url: https://testpypi.org/p/nle
+    permissions:
+      id-token: write
+    steps:
+      - uses: actions/checkout@v4
+      - name: Check version matches release tag
+        run: |
+          echo "v$(cat version.txt)"
+          echo "${{ github.event.release.tag_name }}"
+          [[ "${{ github.event.release.tag_name }}" == "v$(cat version.txt)" ]]
+      - name: Get sdist artifact # Get sdist artifact from the test_sdist job
+        uses: actions/download-artifact@v3
+        with:
+          name: python-sdist
+          path: dist
+      - name: Get wheels artifacts # Get wheels artifacts from the build_wheels job
+        uses: actions/download-artifact@v3
+        with:
+          name: python-wheels
+          path: dist
+      - name: Report dist contents
+        run: |
+          pwd
+          ls -R
+          ls -al .
+          ls -R dist/
+          ls -al dist/
+      - name: Publish package to TestPyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          repository-url: https://test.pypi.org/legacy/
+
   # TODO move this to separate workflow whenever github decides to provide basic
   # functionalities like workflow dependencies :|
   deploy_sdist:
```
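The new `test_deploy` job runs only for `prereleased` events and, before publishing to TestPyPI, asserts that the release tag equals `v` plus the contents of `version.txt`. A minimal Python sketch of that bash check (the function name and the example tag are illustrative, not part of the workflow):

```python
# Minimal sketch of the "Check version matches release tag" step, which the
# workflow performs in bash: [[ "$TAG" == "v$(cat version.txt)" ]]
from pathlib import Path


def tag_matches_version(tag: str, version_file: str = "version.txt") -> bool:
    """True when the release tag is exactly 'v' + the contents of version.txt."""
    version = Path(version_file).read_text().strip()
    return tag == f"v{version}"


# With version.txt containing "1.0.0rc1", only the tag "v1.0.0rc1" passes;
# any other tag fails the step, which blocks the TestPyPI upload.
```

Because the job fails on a mismatch, a mistagged prerelease never reaches TestPyPI.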

CMakeLists.txt (4 additions, 2 deletions)

```diff
@@ -1,6 +1,8 @@
 cmake_minimum_required(VERSION 3.15)
 file(STRINGS "version.txt" NLE_VERSION)
-project(nle VERSION ${NLE_VERSION})
+# Remove any rcXX suffix from the version number as CMake doesn't like it
+string(REGEX REPLACE "rc[0-9+]$" "" CMAKE_NLE_VERSION ${NLE_VERSION})
+project(nle VERSION ${CMAKE_NLE_VERSION})
 
 if(CMAKE_BUILD_TYPE MATCHES Debug)
   message("Debug build.")
@@ -30,7 +32,7 @@ else()
   message("Some other build type.")
 endif()
 
-message(STATUS "Building nle backend version: ${NLE_VERSION}")
+message(STATUS "Building nle backend version: ${CMAKE_NLE_VERSION}")
 
 set(CMAKE_POSITION_INDEPENDENT_CODE ON)
```
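CMake's `project(VERSION ...)` accepts only dotted numeric components, so the PEP 440 prerelease suffix in `version.txt` has to be stripped before it reaches `project()`. A rough Python equivalent of the intended substitution (this sketch spells the suffix as `rc` followed by one or more digits):

```python
# Rough Python equivalent of the string(REGEX REPLACE ...) call above:
# drop a trailing rcN suffix so only the dotted numeric version remains.
import re


def cmake_safe_version(nle_version: str) -> str:
    """E.g. '1.0.0rc1' -> '1.0.0'; plain release versions pass through unchanged."""
    return re.sub(r"rc[0-9]+$", "", nle_version)


print(cmake_safe_version("1.0.0rc1"))  # 1.0.0
print(cmake_safe_version("1.0.0"))     # 1.0.0
```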

README.md (36 additions, 35 deletions)

````diff
@@ -28,41 +28,6 @@ README](./README.nh), at [nethack.org](https://nethack.org/), and on the
 
 This version of NLE uses the [Farama Organisation Gymnasium Environment](https://gymnasium.farama.org) APIs.
 
-### NLE Language Wrapper
-
-We thank [ngoodger](https://github.com/ngoodger) for implementing the [NLE Language Wrapper](https://github.com/ngoodger/nle-language-wrapper), which translates the non-language observations from NetHack tasks into similar language representations. Actions can also optionally be provided in text form, which is converted to the Discrete actions of the NLE.
-
-### NetHack Learning Dataset
-
-The NetHack Learning Dataset (NLD) code now ships with `NLE`, allowing users to load the large-scale datasets featured in [Dungeons and Data: A Large-Scale NetHack Dataset](), while also generating and loading their own datasets.
-
-```python
-import nle.dataset as nld
-
-if not nld.db.exists():
-    nld.db.create()
-# NB: Different methods are used for data based on NLE and data from NAO.
-nld.add_nledata_directory("/path/to/nld-aa", "nld-aa-v0")
-nld.add_altorg_directory("/path/to/nld-nao", "nld-nao-v0")
-
-dataset = nld.TtyrecDataset("nld-aa-v0", batch_size=128, ...)
-for i, mb in enumerate(dataset):
-    foo(mb)  # etc...
-```
-
-For information on how to download NLD-AA and NLD-NAO, see the dataset doc [here](./DATASET.md).
-
-Otherwise, check out the tutorial Colab notebook [here](https://colab.research.google.com/drive/1GRP15SbOEDjbyhJGMDDb2rXAptRQztUD?usp=sharing).
-
-# Papers using the NetHack Learning Environment
-- Izumiya and Simo-Serra, [Inventory Management with Attention-Based Meta Actions](https://esslab.jp/~ess/publications/IzumiyaCOG2021.pdf) (Waseda University, CoG 2021).
-- Samvelyan et al., [MiniHack the Planet: A Sandbox for Open-Ended Reinforcement Learning Research](https://arxiv.org/abs/2109.13202) (FAIR, UCL, Oxford, NeurIPS 2021).
-- Zhang et al., [BeBold: Exploration Beyond the Boundary of Explored Regions](https://arxiv.org/abs/2012.08621) (Berkeley, FAIR, Dec 2020).
-- Küttler et al., [The NetHack Learning Environment](https://arxiv.org/abs/2006.13760) (FAIR, Oxford, NYU, Imperial, UCL, NeurIPS 2020).
-
-Open a [pull
-request](https://github.com/heiner/nle/edit/main/README.md)
-to add papers.
 
 # Getting started
 
@@ -200,6 +165,42 @@ $ python -m nle.scripts.plot
 steps
 ```
 
+### NLE Language Wrapper
+
+We thank [ngoodger](https://github.com/ngoodger) for implementing the [NLE Language Wrapper](https://github.com/ngoodger/nle-language-wrapper), which translates the non-language observations from NetHack tasks into similar language representations. Actions can also optionally be provided in text form, which is converted to the Discrete actions of the NLE.
+
+### NetHack Learning Dataset
+
+The NetHack Learning Dataset (NLD) code now ships with `NLE`, allowing users to load the large-scale datasets featured in [Dungeons and Data: A Large-Scale NetHack Dataset](), while also generating and loading their own datasets.
+
+```python
+import nle.dataset as nld
+
+if not nld.db.exists():
+    nld.db.create()
+# NB: Different methods are used for data based on NLE and data from NAO.
+nld.add_nledata_directory("/path/to/nld-aa", "nld-aa-v0")
+nld.add_altorg_directory("/path/to/nld-nao", "nld-nao-v0")
+
+dataset = nld.TtyrecDataset("nld-aa-v0", batch_size=128, ...)
+for i, mb in enumerate(dataset):
+    foo(mb)  # etc...
+```
+
+For information on how to download NLD-AA and NLD-NAO, see the dataset doc [here](./DATASET.md).
+
+Otherwise, check out the tutorial Colab notebook [here](https://colab.research.google.com/drive/1GRP15SbOEDjbyhJGMDDb2rXAptRQztUD?usp=sharing).
+
+# Papers using the NetHack Learning Environment
+- Izumiya and Simo-Serra, [Inventory Management with Attention-Based Meta Actions](https://esslab.jp/~ess/publications/IzumiyaCOG2021.pdf) (Waseda University, CoG 2021).
+- Samvelyan et al., [MiniHack the Planet: A Sandbox for Open-Ended Reinforcement Learning Research](https://arxiv.org/abs/2109.13202) (FAIR, UCL, Oxford, NeurIPS 2021).
+- Zhang et al., [BeBold: Exploration Beyond the Boundary of Explored Regions](https://arxiv.org/abs/2012.08621) (Berkeley, FAIR, Dec 2020).
+- Küttler et al., [The NetHack Learning Environment](https://arxiv.org/abs/2006.13760) (FAIR, Oxford, NYU, Imperial, UCL, NeurIPS 2020).
+
+Open a [pull
+request](https://github.com/heiner/nle/edit/main/README.md)
+to add papers.
+
 
 
 # Contributing
````

version.txt (1 addition, 1 deletion)

```diff
@@ -1 +1 @@
-1.0.0
+1.0.0rc1
```
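`1.0.0rc1` is the PEP 440 spelling of a release candidate, which is what the commit message means by matching the Python version spec. A quick sanity check, assuming the third-party `packaging` library is available:

```python
# PEP 440 sanity check for the new version string (pip install packaging).
from packaging.version import Version

v = Version("1.0.0rc1")
print(v.is_prerelease)       # True: rc1 marks a release candidate
print(v.base_version)        # 1.0.0, the part CMake ends up using
print(v < Version("1.0.0"))  # True: the rc sorts before the final release
```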
