Merge "tokenizer: do not try to decode strings on Python 3"

Zuul 2020-12-18 16:52:18 +00:00 committed by Gerrit Code Review
commit 119f96f293
1 changed file with 6 additions and 2 deletions


@@ -82,12 +82,16 @@ def SearchTokenizer():
     def t_SSTRING(t):
        r"'([^\\']+|\\'|\\\\)*'"
-       t.value=t.value[1:-1].decode("string-escape")
+       t.value = t.value[1:-1]
+       if not isinstance(t.value, six.text_type):
+           t.value = t.value.decode('string-escape')
        return t
     def t_DSTRING(t):
        r'"([^\\"]+|\\"|\\\\)*"'
-       t.value=t.value[1:-1].decode("string-escape")
+       t.value = t.value[1:-1]
+       if not isinstance(t.value, six.text_type):
+           t.value = t.value.decode('string-escape')
        return t
     def t_AND(t):
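
The change guards the Python 2-only "string-escape" codec behind a text-type check: under Python 3 the lexer rule already receives a str, so stripping the surrounding quotes is enough, while under Python 2 the byte string still needs unescaping. A minimal standalone sketch of the same pattern follows; the unquote helper name and the sample input are illustrative only, not part of the change:

    import six


    def unquote(value):
        """Strip surrounding quotes and, on Python 2 only, unescape the token.

        Mirrors the guarded decode added to t_SSTRING/t_DSTRING above.
        """
        value = value[1:-1]
        if not isinstance(value, six.text_type):
            # Python 2: value is a byte string; "string-escape" turns \' and \\
            # back into literal quotes and backslashes. This codec does not
            # exist for Python 3 str, hence the isinstance guard.
            value = value.decode('string-escape')
        return value


    token = r'"hello \"world\""'
    print(unquote(token))
    # Python 2: prints hello "world"   (escapes decoded)
    # Python 3: prints hello \"world\" (already text, left undecoded)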