diff --git a/docs/source/examples/02_nesting/03_multiple_subcommands.rst b/docs/source/examples/02_nesting/03_multiple_subcommands.rst
index 85590a40..78b3d9ea 100644
--- a/docs/source/examples/02_nesting/03_multiple_subcommands.rst
+++ b/docs/source/examples/02_nesting/03_multiple_subcommands.rst
@@ -52,6 +52,7 @@ Multiple unions over nested types are populated using a series of subcommands.
# Train script.
+ @tyro.conf.configure(tyro.conf.ConsolidateSubcommandArgs)
def train(
dataset: Union[Mnist, ImageNet] = Mnist(),
optimizer: Union[Adam, Sgd] = Adam(),
@@ -100,6 +101,14 @@ Multiple unions over nested types are populated using a series of subcommands.
 .. raw:: html
 
- python 02_nesting/03_multiple_subcommands.py dataset:mnist optimizer:adam --optimizer.learning-rate 3e-4
+ python 02_nesting/03_multiple_subcommands.py dataset:mnist optimizer:adam --help
 
-.. program-output:: python ../../examples/02_nesting/03_multiple_subcommands.py dataset:mnist optimizer:adam --optimizer.learning-rate 3e-4
+.. program-output:: python ../../examples/02_nesting/03_multiple_subcommands.py dataset:mnist optimizer:adam --help
+
+------------
+
+.. raw:: html
+
+ python 02_nesting/03_multiple_subcommands.py dataset:mnist optimizer:adam --optimizer.learning-rate 3e-4 --dataset.binary
+
+.. program-output:: python ../../examples/02_nesting/03_multiple_subcommands.py dataset:mnist optimizer:adam --optimizer.learning-rate 3e-4 --dataset.binary
diff --git a/examples/02_nesting/03_multiple_subcommands.py b/examples/02_nesting/03_multiple_subcommands.py
index 7c31066a..99d4bf79 100644
--- a/examples/02_nesting/03_multiple_subcommands.py
+++ b/examples/02_nesting/03_multiple_subcommands.py
@@ -6,7 +6,8 @@
`python ./03_multiple_subcommands.py`
`python ./03_multiple_subcommands.py --help`
`python ./03_multiple_subcommands.py dataset:mnist --help`
-`python ./03_multiple_subcommands.py dataset:mnist optimizer:adam --optimizer.learning-rate 3e-4`
+`python ./03_multiple_subcommands.py dataset:mnist optimizer:adam --help`
+`python ./03_multiple_subcommands.py dataset:mnist optimizer:adam --optimizer.learning-rate 3e-4 --dataset.binary`
"""
from __future__ import annotations
@@ -47,6 +48,7 @@ class Sgd:
# Train script.
+@tyro.conf.configure(tyro.conf.ConsolidateSubcommandArgs)
def train(
dataset: Union[Mnist, ImageNet] = Mnist(),
optimizer: Union[Adam, Sgd] = Adam(),
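For reference, the change to the example file boils down to roughly the following self-contained sketch. The dataclass fields below are illustrative rather than copied from the repository (only `Mnist.binary` and `Adam.learning_rate` are implied by the flags exercised in the docs); the point is that `tyro.conf.ConsolidateSubcommandArgs` defers every flag until after the final subcommand, which is what makes `dataset:mnist optimizer:adam --optimizer.learning-rate 3e-4 --dataset.binary` parse.

# Condensed sketch of the decorated example; field defaults are illustrative.
from __future__ import annotations

import dataclasses
from typing import Union

import tyro


@dataclasses.dataclass(frozen=True)
class Mnist:
    binary: bool = False  # Implied by the `--dataset.binary` flag above.


@dataclasses.dataclass(frozen=True)
class ImageNet:
    subset: int = 1000  # Illustrative field, not from the real example.


@dataclasses.dataclass(frozen=True)
class Adam:
    learning_rate: float = 1e-3  # Implied by `--optimizer.learning-rate`.


@dataclasses.dataclass(frozen=True)
class Sgd:
    learning_rate: float = 3e-4  # Illustrative field, not from the real example.


# ConsolidateSubcommandArgs pushes all flags to after the last subcommand, so
# `--dataset.*` options remain usable even though dataset:mnist is chosen first.
@tyro.conf.configure(tyro.conf.ConsolidateSubcommandArgs)
def train(
    dataset: Union[Mnist, ImageNet] = Mnist(),
    optimizer: Union[Adam, Sgd] = Adam(),
) -> None:
    print(dataset, optimizer)


if __name__ == "__main__":
    tyro.cli(train)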
diff --git a/pyproject.toml b/pyproject.toml
index 7343a987..1d869fc0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "tyro"
-version = "0.3.33"
+version = "0.3.34"
description = "Strongly typed, zero-effort CLI interfaces"
authors = ["brentyi "]
include = ["./tyro/**/*"]
diff --git a/tyro/_parsers.py b/tyro/_parsers.py
index 99aa0083..3b7e7ff5 100644
--- a/tyro/_parsers.py
+++ b/tyro/_parsers.py
@@ -241,6 +241,9 @@ def apply(
else:
self.apply_args(parser)
+ # Break some API boundaries to rename the "optional arguments" => "arguments".
+ parser._action_groups[1].title = "arguments"
+
return leaves
def apply_args(self, parser: argparse.ArgumentParser) -> None:
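The hunk above leans on argparse internals: a fresh `ArgumentParser` keeps its two built-in groups in `parser._action_groups`, with the positional group at index 0 and the flag group at index 1 (titled "optional arguments" on Python <= 3.9, "options" on 3.10+). Retitling index 1 is what renames the help section. A minimal standalone sketch of the same trick, independent of tyro:

# Standalone illustration of the retitling trick; like the change above, it
# reaches into argparse's private _action_groups list.
import argparse

parser = argparse.ArgumentParser(prog="demo")
parser.add_argument("--learning-rate", type=float, default=3e-4)

# Index 0 is the built-in positional group, index 1 the built-in flag group.
assert parser._action_groups[0].title == "positional arguments"
parser._action_groups[1].title = "arguments"

# The flag section of the help text is now headed "arguments".
parser.print_help()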
@@ -250,17 +253,18 @@ def apply_args(self, parser: argparse.ArgumentParser) -> None:
parser.description = self.description
# Make argument groups.
- def format_group_name(nested_field_name: str) -> str:
- return (nested_field_name + " arguments").strip()
+ def format_group_name(prefix: str) -> str:
+ return (prefix + " arguments").strip()
group_from_prefix: Dict[str, argparse._ArgumentGroup] = {
"": parser._action_groups[1],
+ **{
+ cast(str, group.title).partition(" ")[0]: group
+ for group in parser._action_groups[2:]
+ },
}
-
- # Break some API boundaries to rename the optional group.
- parser._action_groups[1].title = format_group_name("")
- positional_group = parser.add_argument_group("positional arguments")
- parser._action_groups = parser._action_groups[::-1]
+ positional_group = parser._action_groups[0]
+ assert positional_group.title == "positional arguments"
# Add each argument group. Note that groups with only suppressed arguments won't
# be added.
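The rebuilt `group_from_prefix` mapping recovers any previously created argument groups from `parser._action_groups[2:]`, keying each on the first word of its title; this works because `format_group_name` always produces titles of the form "<prefix> arguments". A rough standalone sketch, with made-up group titles for illustration:

# Sketch of recovering a prefix -> group mapping from argparse internals.
# The "dataset"/"optimizer" group titles here are made up for illustration.
import argparse
from typing import Dict, cast

parser = argparse.ArgumentParser()
parser.add_argument_group("dataset arguments")
parser.add_argument_group("optimizer arguments")

group_from_prefix: Dict[str, argparse._ArgumentGroup] = {
    # Index 1 is argparse's built-in flag group; it holds un-prefixed arguments.
    "": parser._action_groups[1],
    # Groups created via add_argument_group() are appended from index 2 onward,
    # and their titles follow the "<prefix> arguments" convention.
    **{
        cast(str, group.title).partition(" ")[0]: group
        for group in parser._action_groups[2:]
    },
}
assert set(group_from_prefix) == {"", "dataset", "optimizer"}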