Commit 68f7667

perf(es/parser): Replace byte arguments with generics (#10370)
1 parent a2d3596 commit 68f7667
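
The pattern applied throughout is the same: lexer helpers that previously took the dispatching byte as a runtime argument (`c: u8`) now take it as a const generic parameter (`<const C: u8>`), and the byte-handler table in `table.rs` instantiates them per byte with turbofish syntax. Below is a minimal standalone sketch of that idea, not swc code; the names `classify_dyn`, `classify`, and `Tok` are illustrative only. The point is that `C == b'&'` becomes a compile-time constant, so each instantiation compiles down to a single branch.

// Toy comparison of runtime-byte vs. const-generic dispatch (illustrative, not swc code).
#[derive(Debug, PartialEq)]
enum Tok {
    BitAnd,
    BitOr,
}

// Runtime argument: the `c == b'&'` comparison is evaluated at run time.
fn classify_dyn(c: u8) -> Tok {
    if c == b'&' {
        Tok::BitAnd
    } else {
        Tok::BitOr
    }
}

// Const generic parameter: `C` is fixed per instantiation, so the compiler can
// fold the comparison and keep only one arm in `classify::<b'&'>` / `classify::<b'|'>`.
fn classify<const C: u8>() -> Tok {
    if C == b'&' {
        Tok::BitAnd
    } else {
        Tok::BitOr
    }
}

fn main() {
    assert_eq!(classify_dyn(b'&'), classify::<b'&'>());
    assert_eq!(classify_dyn(b'|'), classify::<b'|'>());
}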

2 files changed: +35 −37 lines

crates/swc_ecma_parser/src/lexer/mod.rs

Lines changed: 25 additions & 27 deletions
@@ -330,15 +330,15 @@ impl<'a> Lexer<'a> {
     ///
     /// This is extracted as a method to reduce size of `read_token`.
     #[inline(never)]
-    fn read_token_logical(&mut self, c: u8) -> LexResult<Token> {
+    fn read_token_logical<const C: u8>(&mut self) -> LexResult<Token> {
         let had_line_break_before_last = self.had_line_break_before_last();
         let start = self.cur_pos();

         unsafe {
             // Safety: cur() is Some(c as char)
             self.input.bump();
         }
-        let token = if c == b'&' {
+        let token = if C == b'&' {
             BinOpToken::BitAnd
         } else {
             BinOpToken::BitOr
@@ -354,7 +354,7 @@ impl<'a> Lexer<'a> {
         }

         // '||', '&&'
-        if self.input.cur() == Some(c as char) {
+        if self.input.cur() == Some(C as char) {
             unsafe {
                 // Safety: cur() is Some(c)
                 self.input.bump();
@@ -396,8 +396,8 @@ impl<'a> Lexer<'a> {
     ///
     /// This is extracted as a method to reduce size of `read_token`.
     #[inline(never)]
-    fn read_token_mul_mod(&mut self, c: u8) -> LexResult<Token> {
-        let is_mul = c == b'*';
+    fn read_token_mul_mod<const C: u8>(&mut self) -> LexResult<Token> {
+        let is_mul = C == b'*';
         unsafe {
             // Safety: cur() is Some(c)
             self.input.bump();
@@ -552,7 +552,7 @@ impl<'a> Lexer<'a> {
         Ok(Some(vec![c.into()]))
     }

-    fn read_token_plus_minus(&mut self, c: u8) -> LexResult<Option<Token>> {
+    fn read_token_plus_minus<const C: u8>(&mut self) -> LexResult<Option<Token>> {
         let start = self.cur_pos();

         unsafe {
@@ -561,41 +561,41 @@ impl<'a> Lexer<'a> {
         }

         // '++', '--'
-        Ok(Some(if self.input.cur() == Some(c as char) {
+        Ok(Some(if self.input.cur() == Some(C as char) {
             unsafe {
                 // Safety: cur() is Some(c)
                 self.input.bump();
             }

             // Handle -->
-            if self.state.had_line_break && c == b'-' && self.eat(b'>') {
+            if self.state.had_line_break && C == b'-' && self.eat(b'>') {
                 self.emit_module_mode_error(start, SyntaxError::LegacyCommentInModule);
                 self.skip_line_comment(0);
                 self.skip_space::<true>();
                 return self.read_token();
             }

-            if c == b'+' {
+            if C == b'+' {
                 Token::PlusPlus
             } else {
                 Token::MinusMinus
             }
         } else if self.input.eat_byte(b'=') {
-            Token::AssignOp(if c == b'+' {
+            Token::AssignOp(if C == b'+' {
                 AssignOp::AddAssign
             } else {
                 AssignOp::SubAssign
             })
         } else {
-            Token::BinOp(if c == b'+' {
+            Token::BinOp(if C == b'+' {
                 BinOpToken::Add
             } else {
                 BinOpToken::Sub
             })
         }))
     }

-    fn read_token_bang_or_eq(&mut self, c: u8) -> LexResult<Option<Token>> {
+    fn read_token_bang_or_eq<const C: u8>(&mut self) -> LexResult<Option<Token>> {
         let start = self.cur_pos();
         let had_line_break_before_last = self.had_line_break_before_last();

@@ -608,7 +608,7 @@ impl<'a> Lexer<'a> {
             // "=="

             if self.input.eat_byte(b'=') {
-                if c == b'!' {
+                if C == b'!' {
                     Token::BinOp(BinOpToken::NotEqEq)
                 } else {
                     // =======
@@ -622,16 +622,16 @@ impl<'a> Lexer<'a> {

                     Token::BinOp(BinOpToken::EqEqEq)
                 }
-            } else if c == b'!' {
+            } else if C == b'!' {
                 Token::BinOp(BinOpToken::NotEq)
             } else {
                 Token::BinOp(BinOpToken::EqEq)
             }
-        } else if c == b'=' && self.input.eat_byte(b'>') {
+        } else if C == b'=' && self.input.eat_byte(b'>') {
             // "=>"

             Token::Arrow
-        } else if c == b'!' {
+        } else if C == b'!' {
             Token::Bang
         } else {
             Token::AssignOp(AssignOp::Assign)
@@ -655,51 +655,49 @@ impl Lexer<'_> {
     }

     #[inline(never)]
-    fn read_token_lt_gt(&mut self) -> LexResult<Option<Token>> {
-        debug_assert!(self.cur() == Some('<') || self.cur() == Some('>'));
-
+    fn read_token_lt_gt<const C: u8>(&mut self) -> LexResult<Option<Token>> {
         let had_line_break_before_last = self.had_line_break_before_last();
         let start = self.cur_pos();
-        let c = self.cur().unwrap();
         self.bump();

         if self.syntax.typescript()
             && self.ctx.contains(Context::InType)
             && !self.ctx.contains(Context::ShouldNotLexLtOrGtAsType)
         {
-            if c == '<' {
+            if C == b'<' {
                 return Ok(Some(tok!('<')));
-            } else if c == '>' {
+            } else if C == b'>' {
                 return Ok(Some(tok!('>')));
             }
         }

         // XML style comment. `<!--`
-        if c == '<' && self.is(b'!') && self.peek() == Some('-') && self.peek_ahead() == Some('-') {
+        if C == b'<' && self.is(b'!') && self.peek() == Some('-') && self.peek_ahead() == Some('-')
+        {
             self.skip_line_comment(3);
             self.skip_space::<true>();
             self.emit_module_mode_error(start, SyntaxError::LegacyCommentInModule);

             return self.read_token();
         }

-        let mut op = if c == '<' {
+        let mut op = if C == b'<' {
             BinOpToken::Lt
         } else {
             BinOpToken::Gt
         };

         // '<<', '>>'
-        if self.cur() == Some(c) {
+        if self.cur() == Some(C as char) {
             self.bump();
-            op = if c == '<' {
+            op = if C == b'<' {
                 BinOpToken::LShift
             } else {
                 BinOpToken::RShift
             };

             //'>>>'
-            if c == '>' && self.cur() == Some(c) {
+            if C == b'>' && self.cur() == Some(C as char) {
                 self.bump();
                 op = BinOpToken::ZeroFillRShift;
             }

crates/swc_ecma_parser/src/lexer/table.rs

Lines changed: 10 additions & 10 deletions
@@ -321,19 +321,19 @@ const UNI: ByteHandler = Some(|lexer| {
 const COL: ByteHandler = Some(|lexer| lexer.read_token_colon().map(Some));

 /// `%`
-const PRC: ByteHandler = Some(|lexer| lexer.read_token_mul_mod(b'%').map(Some));
+const PRC: ByteHandler = Some(|lexer| lexer.read_token_mul_mod::<b'%'>().map(Some));

 /// `*`
-const ATR: ByteHandler = Some(|lexer| lexer.read_token_mul_mod(b'*').map(Some));
+const ATR: ByteHandler = Some(|lexer| lexer.read_token_mul_mod::<b'*'>().map(Some));

 /// `?`
 const QST: ByteHandler = Some(|lexer| lexer.read_token_question_mark().map(Some));

 /// `&`
-const AMP: ByteHandler = Some(|lexer| lexer.read_token_logical(b'&').map(Some));
+const AMP: ByteHandler = Some(|lexer| lexer.read_token_logical::<b'&'>().map(Some));

 /// `|`
-const PIP: ByteHandler = Some(|lexer| lexer.read_token_logical(b'|').map(Some));
+const PIP: ByteHandler = Some(|lexer| lexer.read_token_logical::<b'|'>().map(Some));

 macro_rules! single_char {
     ($name:ident, $c:literal, $token:ident) => {
@@ -372,25 +372,25 @@ const CRT: ByteHandler = Some(|lexer| {
 });

 /// `+`
-const PLS: ByteHandler = Some(|lexer| lexer.read_token_plus_minus(b'+'));
+const PLS: ByteHandler = Some(|lexer| lexer.read_token_plus_minus::<b'+'>());

 /// `-`
-const MIN: ByteHandler = Some(|lexer| lexer.read_token_plus_minus(b'-'));
+const MIN: ByteHandler = Some(|lexer| lexer.read_token_plus_minus::<b'-'>());

 /// `!`
-const EXL: ByteHandler = Some(|lexer| lexer.read_token_bang_or_eq(b'!'));
+const EXL: ByteHandler = Some(|lexer| lexer.read_token_bang_or_eq::<b'!'>());

 /// `=`
-const EQL: ByteHandler = Some(|lexer| lexer.read_token_bang_or_eq(b'='));
+const EQL: ByteHandler = Some(|lexer| lexer.read_token_bang_or_eq::<b'='>());

 /// `.`
 const PRD: ByteHandler = Some(|lexer| lexer.read_token_dot().map(Some));

 /// `<`
-const LSS: ByteHandler = Some(|lexer| lexer.read_token_lt_gt());
+const LSS: ByteHandler = Some(|lexer| lexer.read_token_lt_gt::<b'<'>());

 /// `>`
-const MOR: ByteHandler = Some(|lexer| lexer.read_token_lt_gt());
+const MOR: ByteHandler = Some(|lexer| lexer.read_token_lt_gt::<b'>'>());

 /// `/`
 const SLH: ByteHandler = Some(|lexer| lexer.read_slash());
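
A note on the table side: each instantiation such as `read_token_plus_minus::<b'+'>` is an ordinary function item, so it still coerces to a plain function pointer and the handler table keeps its simple array-of-`fn` shape. The sketch below is a toy under that assumption, not swc's actual `ByteHandler` machinery; `Handler`, `handler_for`, and the token names are hypothetical.

// Toy handler table over const-generic instantiations (illustrative, not swc code).
#[derive(Debug, PartialEq)]
enum TokenKind {
    Add,
    Sub,
    Other,
}

// Const-generic helper: the branch on `C` is resolved per instantiation.
fn read_plus_minus<const C: u8>() -> TokenKind {
    if C == b'+' {
        TokenKind::Add
    } else {
        TokenKind::Sub
    }
}

fn read_other() -> TokenKind {
    TokenKind::Other
}

// Hypothetical handler type: each slot is a plain `fn` pointer, including
// pointers to specific const-generic instantiations.
type Handler = fn() -> TokenKind;

fn handler_for(byte: u8) -> Handler {
    match byte {
        b'+' => read_plus_minus::<b'+'>,
        b'-' => read_plus_minus::<b'-'>,
        _ => read_other,
    }
}

fn main() {
    assert_eq!(handler_for(b'+')(), TokenKind::Add);
    assert_eq!(handler_for(b'-')(), TokenKind::Sub);
    assert_eq!(handler_for(b'x')(), TokenKind::Other);
}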
