refactor: use to_ instead of as_ for function names.
Some checks failed
Run Unit Tests / Run-Unit-Tests (push): failing

jackfiled 2024-11-10 14:56:21 +08:00
parent 0107e82f19
commit 368557cb17
2 changed files with 16 additions and 14 deletions
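
Background on the rename: the Rust API naming guidelines reserve the as_ prefix for cheap conversions that return a borrowed view, while to_ marks conversions that may allocate and return an owned value. Both renamed methods here build and return a String, so to_ is the accurate prefix. Below is a minimal sketch of the convention on a hypothetical Token type; it is illustrative only and not code from this repository.

struct Token {
    literal_value: String,
}

impl Token {
    // as_*: cheap, hands back a borrowed view of existing data.
    fn as_str(&self) -> &str {
        &self.literal_value
    }

    // to_*: potentially expensive, allocates and returns an owned String.
    fn to_string(&self) -> String {
        self.literal_value.clone()
    }
}

fn main() {
    let token = Token { literal_value: "42".to_owned() };
    let borrowed: &str = token.as_str(); // no allocation
    let owned: String = token.to_string(); // allocates a new String
    assert_eq!(borrowed, owned);
}

Under that convention the old as_str(&self) -> String signature was misleading: it allocated despite the as_ prefix.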

View File

@@ -52,7 +52,7 @@ fn integer_node_parser(
         if let LexicalTokenType::ConstInteger(number) = t.span[0].token_type {
             SyntaxNode::const_integer(number)
         } else {
-            panic!("Illegal integer constant: {}", t.as_str())
+            panic!("Illegal integer constant: {}", t.to_string())
         }
     })(cursor)
 }
@@ -64,7 +64,7 @@ fn float_node_parser(
         if let LexicalTokenType::ConstFloat(number) = t.span[0].token_type {
             SyntaxNode::const_float(number)
         } else {
-            panic!("Illegal float constant: {}", t.as_str())
+            panic!("Illegal float constant: {}", t.to_string())
         }
     })(cursor)
 }
@@ -73,12 +73,12 @@ fn literal_string_node_parser(
     cursor: LexicalTokenSpan,
 ) -> IResult<LexicalTokenSpan, Rc<RefCell<SyntaxNode>>> {
     map(lexical!(String), |t: LexicalTokenSpan| {
-        SyntaxNode::literal_string(t.as_str())
+        SyntaxNode::literal_string(t.to_string())
     })(cursor)
 }
 
 fn identifier_parser(cursor: LexicalTokenSpan) -> IResult<LexicalTokenSpan, String> {
-    map(lexical!(Identifier), |t: LexicalTokenSpan| t.as_str())(cursor)
+    map(lexical!(Identifier), |t: LexicalTokenSpan| t.to_string())(cursor)
 }
 
 type LeftValueParseType<'a> = (
@@ -144,6 +144,8 @@ fn primary_parser(curser: LexicalTokenSpan) -> IResult<LexicalTokenSpan, Rc<RefC
     ))(curser)
 }
 
+type UnaryParseType<'a> = (LexicalTokenSpan<'a>, Rc<RefCell<SyntaxNode>>);
+
 fn unary_parser(cursor: LexicalTokenSpan) -> IResult<LexicalTokenSpan, Rc<RefCell<SyntaxNode>>> {
     // unary_parser -> primary_parser | (+ | - | !) unary_parser
     alt((
@@ -157,8 +159,8 @@ fn unary_parser(cursor: LexicalTokenSpan) -> IResult<LexicalTokenSpan, Rc<RefCel
                 )),
                 unary_parser,
             )),
-            |(token_span, expression)| {
-                let token_span_string = token_span.as_str();
+            |(token_span, expression): UnaryParseType| {
+                let token_span_string = token_span.to_string();
                 let operator = match &token_span_string[..] {
                     "+" => UnaryNodeType::Plus,
                     "-" => UnaryNodeType::Minus,
@@ -196,7 +198,7 @@ fn multiply_parser(cursor: LexicalTokenSpan) -> IResult<LexicalTokenSpan, Rc<Ref
     let mut node = first;
 
     for (token_span, expression) in others {
-        let str = token_span.as_str();
+        let str = token_span.to_string();
         let operator = match &str[..] {
             "*" => BinaryNodeType::Multiply,
             "/" => BinaryNodeType::Divide,
@@ -226,7 +228,7 @@ fn add_parser(cursor: LexicalTokenSpan) -> IResult<LexicalTokenSpan, Rc<RefCell<
     let mut node = first;
 
     for (token_span, expression) in others {
-        let str = token_span.as_str();
+        let str = token_span.to_string();
         let operator = match &str[..] {
             "+" => BinaryNodeType::Add,
@@ -262,7 +264,7 @@ fn relation_parser(cursor: LexicalTokenSpan) -> IResult<LexicalTokenSpan, Rc<Ref
     let mut node = first;
 
     for (token_span, expression) in others {
-        let str = token_span.as_str();
+        let str = token_span.to_string();
         let operator = match &str[..] {
             ">=" => BinaryNodeType::GreaterEqual,
@@ -294,7 +296,7 @@ fn equal_parser(cursor: LexicalTokenSpan) -> IResult<LexicalTokenSpan, Rc<RefCel
     let mut node = first;
 
     for (token_span, expression) in others {
-        let str = token_span.as_str();
+        let str = token_span.to_string();
         let operator = match &str[..] {
             "==" => BinaryNodeType::Equal,
@@ -321,7 +323,7 @@ fn and_parser(cursor: LexicalTokenSpan) -> IResult<LexicalTokenSpan, Rc<RefCell<
     let mut node = first;
 
     for (token_span, expression) in others {
-        let str = token_span.as_str();
+        let str = token_span.to_string();
         let operator = match &str[..] {
             "&&" => BinaryNodeType::And,
@@ -347,7 +349,7 @@ fn or_parser(cursor: LexicalTokenSpan) -> IResult<LexicalTokenSpan, Rc<RefCell<S
     let mut node = first;
 
     for (token_span, expression) in others {
-        let str = token_span.as_str();
+        let str = token_span.to_string();
         let operator = match &str[..] {
             "||" => BinaryNodeType::Or,

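All of the hunks above share the same nom-style shape: build a parser value with map(inner_parser, closure) and immediately apply it to the input with (cursor). The sketch below shows that shape in isolation, assuming nom 7 (whose combinators return callable closures) and using the built-in digit1 parser over plain &str input instead of the project's lexical! macro and LexicalTokenSpan; the names are illustrative only.

use nom::{character::complete::digit1, combinator::map, IResult};

// Recognize one or more ASCII digits, then convert the matched slice
// into an owned value inside the map closure, mirroring the
// map(parser, |t| ...)(cursor) calls in the diff above.
fn integer_literal(input: &str) -> IResult<&str, u32> {
    map(digit1, |digits: &str| digits.parse::<u32>().unwrap())(input)
}

fn main() {
    // The parser returns the remaining input together with the parsed value.
    assert_eq!(integer_literal("42 + x"), Ok((" + x", 42)));
}
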
View File

@@ -11,7 +11,7 @@ impl<'a> LexicalToken<'a> {
     }
 
     /// Gets the literal value of the current lexical token.
-    pub fn as_str(&self) -> String {
+    pub fn to_string(&self) -> String {
         self.literal_value.to_owned()
     }
 }
@@ -22,7 +22,7 @@ impl<'a> LexicalTokenSpan<'a> {
     }
 
     /// Gets the source code represented by the lexical token span.
-    pub fn as_str(&self) -> String {
+    pub fn to_string(&self) -> String {
         self.span.iter().map(|token| token.literal_value).collect()
     }
 }
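
One more note on this second file: defining an inherent to_string(&self) -> String method compiles, but the type still has no Display formatting, and clippy has a lint (inherent_to_string) for exactly this pattern. An alternative is to implement std::fmt::Display and get to_string for free from the standard library's blanket impl<T: Display> ToString for T. The sketch below assumes field types inferred from how the diff reads (literal_value: &'a str, span: &'a [LexicalToken<'a>]); it is not the repository's actual code.

use std::fmt;

// Field types here are assumptions based on the diff, not the
// repository's real definitions.
struct LexicalToken<'a> {
    literal_value: &'a str,
}

struct LexicalTokenSpan<'a> {
    span: &'a [LexicalToken<'a>],
}

impl fmt::Display for LexicalToken<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.literal_value)
    }
}

impl fmt::Display for LexicalTokenSpan<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Concatenate the literal values of every token in the span.
        for token in self.span {
            f.write_str(token.literal_value)?;
        }
        Ok(())
    }
}

fn main() {
    let tokens = [
        LexicalToken { literal_value: "1" },
        LexicalToken { literal_value: "+" },
        LexicalToken { literal_value: "2" },
    ];
    let span = LexicalTokenSpan { span: &tokens };
    // to_string now comes from the blanket ToString impl.
    assert_eq!(span.to_string(), "1+2");
}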