Commit 888151d

Expanded Cmd2Lexer to highlight argparse flags in red and values in yellow
We can roll this back if people don't like it.
1 parent 42033b6 commit 888151d

File tree: 2 files changed, +50 −6 lines

cmd2/pt_utils.py

Lines changed: 21 additions & 1 deletion
@@ -197,7 +197,27 @@ def get_line(lineno: int) -> list[tuple[str, str]]:
 
             # Add the rest of the line
             if cmd_end < len(line):
-                tokens.append(('', line[cmd_end:]))
+                rest = line[cmd_end:]
+                # Regex to match whitespace, flags, quoted strings, or other words
+                arg_pattern = re.compile(r'(\s+)|(--?[^\s\'"]+)|("[^"]*"|\'[^\']*\')|([^\s\'"]+)')
+
+                # Get redirection tokens and terminators to avoid highlighting them as values
+                exclude_tokens = set(constants.REDIRECTION_TOKENS)
+                if hasattr(self.cmd_app, 'statement_parser'):
+                    exclude_tokens.update(self.cmd_app.statement_parser.terminators)
+
+                for m in arg_pattern.finditer(rest):
+                    space, flag, quoted, word = m.groups()
+                    text = m.group(0)
+
+                    if space:
+                        tokens.append(('', text))
+                    elif flag:
+                        tokens.append(('ansired', text))
+                    elif (quoted or word) and text not in exclude_tokens:
+                        tokens.append(('ansiyellow', text))
+                    else:
+                        tokens.append(('', text))
             elif line:
                 # No command match found, add the entire line unstyled
                 tokens.append(('', line))
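As a quick illustration of the argument highlighting added above, the standalone sketch below reproduces the classification loop from the diff. It is not part of the commit: the `exclude_tokens` set is a hypothetical stand-in for `constants.REDIRECTION_TOKENS` plus the app's `statement_parser.terminators`, used here only to show how flags, quoted values, and redirection tokens end up styled.

```python
import re

# Same pattern as in the diff: group 1 = whitespace, group 2 = flag,
# group 3 = quoted string, group 4 = bare word.
arg_pattern = re.compile(r'(\s+)|(--?[^\s\'"]+)|("[^"]*"|\'[^\']*\')|([^\s\'"]+)')

# Hypothetical stand-in for constants.REDIRECTION_TOKENS plus the app's
# statement terminators (illustrative values only).
exclude_tokens = {'>', '>>', '|', ';'}


def classify(rest: str) -> list[tuple[str, str]]:
    """Tokenize the argument portion of a line the way the new lexer code does."""
    tokens: list[tuple[str, str]] = []
    for m in arg_pattern.finditer(rest):
        space, flag, quoted, word = m.groups()
        text = m.group(0)
        if space:
            tokens.append(('', text))            # whitespace stays unstyled
        elif flag:
            tokens.append(('ansired', text))     # -v / --name style flags
        elif (quoted or word) and text not in exclude_tokens:
            tokens.append(('ansiyellow', text))  # values, including quoted strings
        else:
            tokens.append(('', text))            # redirection tokens / terminators
    return tokens


print(classify(' -v --name "John Doe" > out.txt'))
# [('', ' '), ('ansired', '-v'), ('', ' '), ('ansired', '--name'), ('', ' '),
#  ('ansiyellow', '"John Doe"'), ('', ' '), ('', '>'), ('', ' '),
#  ('ansiyellow', 'out.txt')]
```

Run on the argument portion of the new test's input line, this produces the same token stream that `test_lex_document_arguments` below expects for everything after the command name.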

tests/test_pt_utils.py

Lines changed: 29 additions & 5 deletions
@@ -50,7 +50,7 @@ def test_lex_document_command(self, mock_cmd_app):
         get_line = lexer.lex_document(document)
         tokens = get_line(0)
 
-        assert tokens == [('ansigreen', 'help'), ('', ' something')]
+        assert tokens == [('ansigreen', 'help'), ('', ' '), ('ansiyellow', 'something')]
 
     def test_lex_document_alias(self, mock_cmd_app):
         """Test lexing an alias."""
@@ -62,7 +62,7 @@ def test_lex_document_alias(self, mock_cmd_app):
         get_line = lexer.lex_document(document)
         tokens = get_line(0)
 
-        assert tokens == [('ansicyan', 'ls'), ('', ' -l')]
+        assert tokens == [('ansicyan', 'ls'), ('', ' '), ('ansired', '-l')]
 
     def test_lex_document_macro(self, mock_cmd_app):
         """Test lexing a macro."""
@@ -74,7 +74,7 @@ def test_lex_document_macro(self, mock_cmd_app):
         get_line = lexer.lex_document(document)
         tokens = get_line(0)
 
-        assert tokens == [('ansimagenta', 'my_macro'), ('', ' arg1')]
+        assert tokens == [('ansimagenta', 'my_macro'), ('', ' '), ('ansiyellow', 'arg1')]
 
     def test_lex_document_leading_whitespace(self, mock_cmd_app):
         """Test lexing with leading whitespace."""
@@ -86,7 +86,7 @@ def test_lex_document_leading_whitespace(self, mock_cmd_app):
         get_line = lexer.lex_document(document)
         tokens = get_line(0)
 
-        assert tokens == [('', ' '), ('ansigreen', 'help'), ('', ' something')]
+        assert tokens == [('', ' '), ('ansigreen', 'help'), ('', ' '), ('ansiyellow', 'something')]
 
     def test_lex_document_unknown_command(self, mock_cmd_app):
         """Test lexing an unknown command."""
@@ -97,7 +97,7 @@ def test_lex_document_unknown_command(self, mock_cmd_app):
         get_line = lexer.lex_document(document)
         tokens = get_line(0)
 
-        assert tokens == [('', 'unknown'), ('', ' command')]
+        assert tokens == [('', 'unknown'), ('', ' '), ('ansiyellow', 'command')]
 
     def test_lex_document_no_command(self, mock_cmd_app):
         """Test lexing an empty line or line with only whitespace."""
@@ -110,6 +110,30 @@ def test_lex_document_no_command(self, mock_cmd_app):
 
         assert tokens == [('', ' ')]
 
+    def test_lex_document_arguments(self, mock_cmd_app):
+        """Test lexing a command with flags and values."""
+        mock_cmd_app.all_commands = ["help"]
+        lexer = pt_utils.Cmd2Lexer(cast(Any, mock_cmd_app))
+
+        line = "help -v --name \"John Doe\" > out.txt"
+        document = Document(line)
+        get_line = lexer.lex_document(document)
+        tokens = get_line(0)
+
+        assert tokens == [
+            ('ansigreen', 'help'),
+            ('', ' '),
+            ('ansired', '-v'),
+            ('', ' '),
+            ('ansired', '--name'),
+            ('', ' '),
+            ('ansiyellow', '"John Doe"'),
+            ('', ' '),
+            ('', '>'),
+            ('', ' '),
+            ('ansiyellow', 'out.txt'),
+        ]
+
 
 class TestCmd2Completer:
     def test_get_completions_basic(self, mock_cmd_app):
