
Commit 2ceae10

Merge pull request #59: allow # in IDs
2 parents: 7ec0556 + 320deb5

3 files changed: +7, -3 lines

.travis.yml

Lines changed: 4 additions & 1 deletion
@@ -8,7 +8,10 @@ python:
 install:
   - "pip install ."
   - "pip install pytest"
-  - "pip install coverage coveralls"
+  - "pip install coveralls"
+  # Coveralls 4.0 doesn't support Python 3.2
+  - if [ "$TRAVIS_PYTHON_VERSION" == "3.2" ]; then pip install coverage==3.7.1; fi
+  - if [ "$TRAVIS_PYTHON_VERSION" != "3.2" ]; then pip install coverage; fi
 script: coverage run setup.py test
 after_success:
   - coveralls

jsonpath_rw/lexer.py

Lines changed: 1 addition & 1 deletion
@@ -61,7 +61,7 @@ def tokenize(self, string):
     t_ignore = ' \t'

     def t_ID(self, t):
-        r'[a-zA-Z_@][a-zA-Z0-9_@\-]*'
+        r'[a-zA-Z_@#][a-zA-Z0-9_@\-]*'
         t.type = self.reserved_words.get(t.value, 'ID')
         return t
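For context, a minimal sketch (not part of the commit) of what this one-character regex change enables: adding '#' to the leading character class of t_ID lets an input such as '#text' lex as an ordinary ID token instead of raising JsonPathLexerError. Run against a checkout with this change applied:

from jsonpath_rw.lexer import JsonPathLexer

# 'a.#text' should now yield three tokens: ID('a'), '.', ID('#text'),
# mirroring the assertion added in tests/test_lexer.py below.
for tok in JsonPathLexer().tokenize('a.#text'):
    print(tok.type, tok.value)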

tests/test_lexer.py

Lines changed: 2 additions & 1 deletion
@@ -53,6 +53,7 @@ def test_simple_inputs(self):
         self.assert_lex_equiv('`this`', [self.token('this', 'NAMED_OPERATOR')])
         self.assert_lex_equiv('|', [self.token('|', '|')])
         self.assert_lex_equiv('where', [self.token('where', 'WHERE')])
+        self.assert_lex_equiv('a.#text', [self.token('a', 'ID'), self.token('.', '.'), self.token('#text', 'ID')])

     def test_basic_errors(self):
         def tokenize(s):
@@ -66,4 +67,4 @@ def tokenize(s):
         self.assertRaises(JsonPathLexerError, tokenize, '"`')
         self.assertRaises(JsonPathLexerError, tokenize, "'`")
         self.assertRaises(JsonPathLexerError, tokenize, '?')
-        self.assertRaises(JsonPathLexerError, tokenize, '$.foo.bar.#')
+        self.assertRaises(JsonPathLexerError, tokenize, '$.foo.bar.%')
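Downstream of the lexer, the practical payoff is that field names beginning with '#' (for example the '#text' keys produced by common XML-to-dict conversions) become addressable. A hedged usage sketch, assuming the parser accepts the new ID token unchanged; the sample data and key names are illustrative, not from the commit:

from jsonpath_rw import parse

# Illustrative document with a '#text' key; the shape is an assumption.
data = {'a': {'@attr': 'x', '#text': 'hello'}}

matches = parse('a.#text').find(data)
print([m.value for m in matches])  # expected: ['hello']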
