use self::Token::*;
use crate::cow_rc_str::CowRcStr;
use crate::parser::ParserState;
use matches::matches;
use std::char;
use std::i32;
use std::ops::Range;

/// One of the pieces the CSS input is broken into.
///
/// Some components use `Cow` in order to borrow from the original input string
/// and avoid allocating/copying when possible.
#[derive(PartialEq, Debug, Clone)]
pub enum Token<'a> {
    /// A [`<ident-token>`](https://drafts.csswg.org/css-syntax/#ident-token-diagram)
    Ident(CowRcStr<'a>),

    /// A [`<at-keyword-token>`](https://drafts.csswg.org/css-syntax/#at-keyword-token-diagram)
    ///
    /// The value does not include the `@` marker.
    AtKeyword(CowRcStr<'a>),

    /// A [`<hash-token>`](https://drafts.csswg.org/css-syntax/#hash-token-diagram) with the type flag set to "unrestricted"
    ///
    /// The value does not include the `#` marker.
    Hash(CowRcStr<'a>),

    /// A [`<hash-token>`](https://drafts.csswg.org/css-syntax/#hash-token-diagram) with the type flag set to "id"
    ///
    /// The value does not include the `#` marker.
    IDHash(CowRcStr<'a>),

    /// A [`<string-token>`](https://drafts.csswg.org/css-syntax/#string-token-diagram)
    ///
    /// The value does not include the quotes.
    QuotedString(CowRcStr<'a>),

    /// A [`<url-token>`](https://drafts.csswg.org/css-syntax/#url-token-diagram)
    ///
    /// The value does not include the `url(` `)` markers. Note that `url( <string-token> )` is
    /// represented by a `Function` token.
    UnquotedUrl(CowRcStr<'a>),

    /// A `<delim-token>`
    Delim(char),

    /// A [`<number-token>`](https://drafts.csswg.org/css-syntax/#number-token-diagram)
    Number {
        /// Whether the number had a `+` or `-` sign.
        ///
        /// This is used in some cases like the `An+B` micro syntax. (See the `parse_nth` function.)
        has_sign: bool,

        /// The value as a float
        value: f32,

        /// If the original source did not include a fractional part, the value as an integer.
        int_value: Option<i32>,
    },

    /// A [`<percentage-token>`](https://drafts.csswg.org/css-syntax/#percentage-token-diagram)
    Percentage {
        /// Whether the number had a `+` or `-` sign.
        has_sign: bool,

        /// The value as a float, divided by 100 so that the nominal range is 0.0 to 1.0.
        unit_value: f32,

        /// If the original source did not include a fractional part, the value as an integer.
        /// It is **not** divided by 100.
        int_value: Option<i32>,
    },

    /// A [`<dimension-token>`](https://drafts.csswg.org/css-syntax/#dimension-token-diagram)
    Dimension {
        /// Whether the number had a `+` or `-` sign.
        ///
        /// This is used in some cases like the `An+B` micro syntax. (See the `parse_nth` function.)
        has_sign: bool,

        /// The value as a float
        value: f32,

        /// If the original source did not include a fractional part, the value as an integer.
        int_value: Option<i32>,

        /// The unit, e.g. "px" in `12px`
        unit: CowRcStr<'a>,
    },

    /// A [`<whitespace-token>`](https://drafts.csswg.org/css-syntax/#whitespace-token-diagram)
    WhiteSpace(&'a str),

    /// A comment.
    ///
    /// The CSS Syntax spec does not generate tokens for comments,
    /// but we do, because we can (borrowed `&str` makes it cheap).
    ///
    /// The value does not include the `/*` `*/` markers.
    Comment(&'a str),

    /// A `:` `<colon-token>`
    Colon,

    /// A `;` `<semicolon-token>`
    Semicolon,

    /// A `,` `<comma-token>`
    Comma,

    /// A `~=` [`<include-match-token>`](https://drafts.csswg.org/css-syntax/#include-match-token-diagram)
    IncludeMatch,

    /// A `|=` [`<dash-match-token>`](https://drafts.csswg.org/css-syntax/#dash-match-token-diagram)
    DashMatch,

    /// A `^=` [`<prefix-match-token>`](https://drafts.csswg.org/css-syntax/#prefix-match-token-diagram)
    PrefixMatch,

    /// A `$=` [`<suffix-match-token>`](https://drafts.csswg.org/css-syntax/#suffix-match-token-diagram)
    SuffixMatch,

    /// A `*=` [`<substring-match-token>`](https://drafts.csswg.org/css-syntax/#substring-match-token-diagram)
    SubstringMatch,

    /// A `<!--` [`<CDO-token>`](https://drafts.csswg.org/css-syntax/#CDO-token-diagram)
    CDO,

    /// A `-->` [`<CDC-token>`](https://drafts.csswg.org/css-syntax/#CDC-token-diagram)
    CDC,

    /// A [`<function-token>`](https://drafts.csswg.org/css-syntax/#function-token-diagram)
    ///
    /// The value (name) does not include the `(` marker.
    Function(CowRcStr<'a>),

    /// A `<(-token>`
    ParenthesisBlock,

    /// A `<[-token>`
    SquareBracketBlock,

    /// A `<{-token>`
    CurlyBracketBlock,

    /// A `<bad-url-token>`
    ///
    /// This token always indicates a parse error.
    BadUrl(CowRcStr<'a>),

    /// A `<bad-string-token>`
    ///
    /// This token always indicates a parse error.
    BadString(CowRcStr<'a>),

    /// A `<)-token>`
    ///
    /// When obtained from one of the `Parser::next*` methods,
    /// this token is always unmatched and indicates a parse error.
    CloseParenthesis,

    /// A `<]-token>`
    ///
    /// When obtained from one of the `Parser::next*` methods,
    /// this token is always unmatched and indicates a parse error.
    CloseSquareBracket,

    /// A `<}-token>`
    ///
    /// When obtained from one of the `Parser::next*` methods,
    /// this token is always unmatched and indicates a parse error.
    CloseCurlyBracket,
}
impl<'a> Token<'a> {
    /// Return whether this token represents a parse error.
    ///
    /// `BadUrl` and `BadString` are tokenizer-level parse errors.
    ///
    /// `CloseParenthesis`, `CloseSquareBracket`, and `CloseCurlyBracket` are *unmatched*
    /// and therefore parse errors when returned by one of the `Parser::next*` methods.
    pub fn is_parse_error(&self) -> bool {
        matches!(
            *self,
            BadUrl(_) | BadString(_) | CloseParenthesis | CloseSquareBracket | CloseCurlyBracket
        )
    }
}
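// A minimal usage sketch of the `Token` API above. It relies only on the variants and
// `is_parse_error` defined in this file, and assumes `CowRcStr` implements `From<&str>`
// (as provided by `cow_rc_str`). The module name is purely illustrative.
#[cfg(test)]
mod token_examples {
    use super::*;

    #[test]
    fn bad_and_unmatched_tokens_are_parse_errors() {
        // `Bad*` tokens and unmatched closing delimiters report a parse error...
        assert!(Token::BadString(CowRcStr::from("oops")).is_parse_error());
        assert!(Token::CloseCurlyBracket.is_parse_error());
        // ...while ordinary tokens do not.
        assert!(!Token::Ident(CowRcStr::from("color")).is_parse_error());
        assert!(!Token::Delim('+').is_parse_error());
    }
}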
#[derive(Clone)]
pub struct Tokenizer<'a> {
    input: &'a str,
    /// Counted in bytes, not code points. From 0.
    position: usize,
    /// The position at the start of the current line; but adjusted to
    /// ensure that computing the column will give the result in units
    /// of UTF-16 characters.
    current_line_start_position: usize,
    current_line_number: u32,
    var_or_env_functions: SeenStatus,
    source_map_url: Option<&'a str>,
    source_url: Option<&'a str>,
}

#[derive(Copy, Clone, PartialEq, Eq)]
enum SeenStatus {
    DontCare,
    LookingForThem,
    SeenAtLeastOne,
}
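// A short sketch of how the `SeenStatus` flag above is driven through the methods below:
// callers opt in with `look_for_var_or_env_functions`, the tokenizer reports function names
// through `see_function`, and `seen_var_or_env_functions` reads the result and resets the flag.
// The test module name is purely illustrative.
#[cfg(test)]
mod var_or_env_flag_examples {
    use super::*;

    #[test]
    fn var_and_env_are_detected_case_insensitively() {
        let mut tokenizer = Tokenizer::new("");
        tokenizer.look_for_var_or_env_functions();
        // `see_function` is normally called from the tokenizer's own function-token handling;
        // calling it directly here just exercises the flag transitions.
        tokenizer.see_function("VAR");
        assert!(tokenizer.seen_var_or_env_functions());
        // Reading the flag resets it to `DontCare`, so a second read reports `false`.
        assert!(!tokenizer.seen_var_or_env_functions());
    }
}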
impl<'a> Tokenizer<'a> {
    #[inline]
    pub fn new(input: &str) -> Tokenizer {
        Tokenizer::with_first_line_number(input, 0)
    }

    #[inline]
    pub fn with_first_line_number(input: &str, first_line_number: u32) -> Tokenizer {
        Tokenizer {
            input: input,
            position: 0,
            current_line_start_position: 0,
            current_line_number: first_line_number,
            var_or_env_functions: SeenStatus::DontCare,
            source_map_url: None,
            source_url: None,
        }
    }

    #[inline]
    pub fn look_for_var_or_env_functions(&mut self) {
        self.var_or_env_functions = SeenStatus::LookingForThem;
    }

    #[inline]
    pub fn seen_var_or_env_functions(&mut self) -> bool {
        let seen = self.var_or_env_functions == SeenStatus::SeenAtLeastOne;
        self.var_or_env_functions = SeenStatus::DontCare;
        seen
    }

    #[inline]
    pub fn see_function(&mut self, name: &str) {
        if self.var_or_env_functions == SeenStatus::LookingForThem {
            if name.eq_ignore_ascii_case("var") || name.eq_ignore_ascii_case("env") {
                self.var_or_env_functions = SeenStatus::SeenAtLeastOne;
            }
        }
    }

    #[inline]
    pub fn next(&mut self) -> Result<Token<'a>, ()> {
        next_token(self)
    }

    #[inline]
    pub fn position(&self) -> SourcePosition {
        SourcePosition(self.position)
    }

    #[inline]
    pub fn current_source_location(&self) -> SourceLocation {
        SourceLocation {
            line: self.current_line_number,
            column: (self.position - self.current_line_start_position + 1) as u32,
        }
    }

    #[inline]
    pub fn current_source_map_url(&self) -> Option<&'a str> {
        self.source_map_url
    }

    #[inline]
    pub fn current_source_url(&self) -> Option<&'a str> {
        self.source_url
    }

    #[inline]
    pub fn state(&self) -> ParserState {
        ParserState {
            position: self.position,
            current_line_start_position: self.current_line_start_position,
            current_line_number: self.current_line_number,
            at_start_of: None,
        }
    }

    #[inline]
    pub fn reset(&mut self, state: &ParserState) {
        self.position = state.position;
        self.current_line_start_position = state.current_line_start_position;
        self.current_line_number = state.current_line_number;
    }

    #[inline]
    pub fn slice_from(&self, start_pos: SourcePosition) -> &'a str {
        &self.input[start_pos.0..self.position]
    }

    #[inline]
    pub fn slice(&self, range: Range<SourcePosition>) -> &'a str {
        &self.input[range.start.0..range.end.0]
    }

    pub fn current_source_line(&self) -> &'a str {
        let current = self.position;
        let start = self.input[0..current]
            .rfind(|c| matches!(c, '\r' | '\n' | '\x0C'))
            .map_or(0, |start| start + 1);
        let end = self.input[current..]
            .find(|c| matches!(c, '\r' | '\n' | '\x0C'))
            .map_or(self.input.len(), |end| current + end);
        &self.input[start..end]
    }

    #[inline]
    pub fn next_byte(&self) -> Option<u8> {
        if self.is_eof() {
            None
        } else {
            Some(self.input.as_bytes()[self.position])
        }
    }

    #[inline]
    fn is_eof(&self) -> bool {
        !self.has_at_least(0)
    }

    // Whether the input has at least `n + 1` more bytes after the current position.
    #[inline]
    fn has_at_least(&self, n: usize) -> bool {
        self.position + n < self.input.len()
    }

    // Advance over `n` bytes. In debug builds, check that none of them is a newline and that
    // each is either ASCII or the leading byte of a 2- or 3-byte UTF-8 sequence
    // (those need no column adjustment).
    #[inline]
    pub fn advance(&mut self, n: usize) {
        if cfg!(debug_assertions) {
            for i in 0..n {
                let b = self.byte_at(i);
                debug_assert!(b.is_ascii() || (b & 0xF0 != 0xF0 && b & 0xC0 != 0x80));
                debug_assert!(b != b'\r' && b != b'\n' && b != b'\x0C');
            }
        }
        self.position += n
    }

    #[inline]
    fn next_byte_unchecked(&self) -> u8 {
        self.byte_at(0)
    }

    #[inline]
    fn byte_at(&self, offset: usize) -> u8 {
        self.input.as_bytes()[self.position + offset]
    }

    // Advance over the leading byte of a 4-byte UTF-8 sequence. Such a character counts as
    // two UTF-16 code units, so the line-start position is moved back by one to keep the
    // column (see `current_source_location`) in UTF-16 units.
    #[inline]
    fn consume_4byte_intro(&mut self) {
        debug_assert!(self.next_byte_unchecked() & 0xF0 == 0xF0);
        self.current_line_start_position = self.current_line_start_position.wrapping_sub(1);
        self.position += 1;
    }

    // Advance over a UTF-8 continuation byte, which contributes no UTF-16 code unit of its
    // own, so the line-start position is nudged forward to compensate.
    #[inline]
    fn consume_continuation_byte(&mut self) {
        debug_assert!(self.next_byte_unchecked() & 0xC0 == 0x80);
        self.current_line_start_position = self.current_line_start_position.wrapping_add(1);
        self.position += 1;
    }

    // Advance over any single byte that is known not to be a newline, applying the same
    // column adjustments as above when the byte is part of a multi-byte sequence.
    #[inline(never)]
    fn consume_known_byte(&mut self, byte: u8) {
        debug_assert!(byte != b'\r' && byte != b'\n' && byte != b'\x0C');
        self.position += 1;
        if byte & 0xF0 == 0xF0 {
            self.current_line_start_position = self.current_line_start_position.wrapping_sub(1);
        } else if byte & 0xC0 == 0x80 {
            self.current_line_start_position = self.current_line_start_position.wrapping_add(1);
        }
    }

    #[inline]
    fn next_char(&self) -> char {
        self.input[self.position..].chars().next().unwrap()
    }

    // Consume one newline, treating `\r\n` as a single newline, and update the line counters.
    #[inline]
    fn consume_newline(&mut self) {
        let byte = self.next_byte_unchecked();
        debug_assert!(byte == b'\r' || byte == b'\n' || byte == b'\x0C');
        self.position += 1;
        if byte == b'\r' && self.next_byte() == Some(b'\n') {
            self.position += 1;
        }
        self.current_line_start_position = self.position;
        self.current_line_number += 1;
    }

    #[inline]
    fn has_newline_at(&self, offset: usize) -> bool {
        self.position + offset < self.input.len()
            && matches!(self.byte_at(offset), b'\n' | b'\r' | b'\x0C')
    }

    // Consume one code point, adjusting the line-start position by the difference between its
    // UTF-8 and UTF-16 lengths so that columns stay in UTF-16 code units.
    #[inline]
    fn consume_char(&mut self) -> char {
        let c = self.next_char();
        let len_utf8 = c.len_utf8();
        self.position += len_utf8;
        self.current_line_start_position = self
            .current_line_start_position
            .wrapping_add(len_utf8 - c.len_utf16());
        c
    }

    #[inline]
    fn starts_with(&self, needle: &[u8]) -> bool {
        self.input.as_bytes()[self.position..].starts_with(needle)
    }

    // Skip whitespace and comments without emitting tokens.
    pub fn skip_whitespace(&mut self) {
        while !self.is_eof() {
            match self.next_byte_unchecked() {
                b' ' | b'\t' => self.advance(1),
                b'\n' | b'\x0C' | b'\r' => self.consume_newline(),
                b'/' => {
                    if self.starts_with(b"/*") {
                        consume_comment(self);
                    } else {
                        return;
                    }
                }
                _ => return,
            }
        }
    }

    // Skip whitespace, comments, and the top-level `<!--` / `-->` markers.
    pub fn skip_cdc_and_cdo(&mut self) {
        while !self.is_eof() {
            match self.next_byte_unchecked() {
                b' ' | b'\t' => self.advance(1),
                b'\n' | b'\x0C' | b'\r' => self.consume_newline(),
                b'/' => {
                    if self.starts_with(b"/*") {
                        consume_comment(self);
                    } else {
                        return;
                    }
                }
                b'<' => {
                    if self.starts_with(b"<!--") {
                        self.advance(4)
                    } else {
                        return;
                    }
                }
                b'-' => {
                    if self.starts_with(b"-->") {
                        self.advance(3)
                    } else {
                        return;
                    }
                }
                _ => return,
            }
        }
    }
}
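// A usage sketch of the tokenizer API above: it drives `skip_whitespace` and `next` over a few
// single-byte punctuation tokens and reads back byte positions and source locations. The public
// `Parser` type is the usual entry point; this only exercises the lower-level API in this module,
// and the module name is purely illustrative.
#[cfg(test)]
mod tokenizer_usage_examples {
    use super::*;

    #[test]
    fn punctuation_positions_and_locations() {
        let mut tokenizer = Tokenizer::new("  :,;()");
        // `skip_whitespace` consumes the two leading spaces without emitting a token.
        tokenizer.skip_whitespace();
        assert_eq!(tokenizer.position().byte_index(), 2);

        let start = tokenizer.position();
        assert_eq!(tokenizer.next(), Ok(Token::Colon));
        assert_eq!(tokenizer.next(), Ok(Token::Comma));
        assert_eq!(tokenizer.next(), Ok(Token::Semicolon));
        assert_eq!(tokenizer.next(), Ok(Token::ParenthesisBlock));
        assert_eq!(tokenizer.next(), Ok(Token::CloseParenthesis));
        // Everything consumed so far can be sliced back out of the original input.
        assert_eq!(tokenizer.slice_from(start), ":,;()");

        // Columns are 1-based; after consuming 7 bytes on line 0 the cursor is at column 8.
        let loc = tokenizer.current_source_location();
        assert_eq!(loc, SourceLocation { line: 0, column: 8 });

        // At end of input, `next` reports an error.
        assert_eq!(tokenizer.next(), Err(()));
    }
}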
/// A position from the start of the input, counted in UTF-8 bytes.
#[derive(PartialEq, Eq, PartialOrd, Ord, Debug, Clone, Copy)]
pub struct SourcePosition(pub(crate) usize);

impl SourcePosition {
    /// Returns the current byte index in the original input.
    #[inline]
    pub fn byte_index(&self) -> usize {
        self.0
    }
}

/// The line and column number for a given position within the input.
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub struct SourceLocation {
    /// The line number, starting at 0 for the first line, unless `with_first_line_number` was used.
    pub line: u32,

    /// The column number within a line, starting at 1 for the first character of the line.
    /// Column numbers are counted in UTF-16 code units.
    pub column: u32,
}
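// A sketch of the UTF-16 column bookkeeping described above: consuming a character whose UTF-8
// length differs from its UTF-16 length still advances the column by its UTF-16 length, because
// `consume_char` shifts `current_line_start_position` by the difference. The test calls the
// private `consume_char` helper directly, which is possible because this module's tests can see
// its private items; the module name is purely illustrative.
#[cfg(test)]
mod source_location_examples {
    use super::*;

    #[test]
    fn columns_are_counted_in_utf16_code_units() {
        // 'é' is 2 UTF-8 bytes but a single UTF-16 code unit, so the column advances by 1.
        let mut tokenizer = Tokenizer::new("é=");
        assert_eq!(tokenizer.current_source_location().column, 1);
        tokenizer.consume_char();
        assert_eq!(tokenizer.current_source_location().column, 2);

        // '😀' is 4 UTF-8 bytes and two UTF-16 code units, so it advances the column by 2.
        let mut tokenizer = Tokenizer::new("😀=");
        tokenizer.consume_char();
        assert_eq!(tokenizer.current_source_location().column, 3);
    }
}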
fn next_token<'a>(tokenizer: &mut Tokenizer<'a>) -> Result<Token<'a>, ()> {
    if tokenizer.is_eof() {
        return Err(());
    }
    let b = tokenizer .
next_byte_unchecked () ; let token = { enum Case { Case1 = 1isize , Case2 = 2isize , Case3 = 3isize , Case4 = 4isize , Case5 = 5isize , Case6 = 6isize , Case7 = 7isize , Case8 = 8isize , Case9 = 9isize , Case10 = 10isize , Case11 = 11isize , Case12 = 12isize , Case13 = 13isize , Case14 = 14isize , Case15 = 15isize , Case16 = 16isize , Case17 = 17isize , Case18 = 18isize , Case19 = 19isize , Case20 = 20isize , Case21 = 21isize , Case22 = 22isize , Case23 = 23isize , Case24 = 24isize , Case25 = 25isize , Case26 = 26isize , Case27 = 27isize , Case28 = 28isize , Case29 = 29isize } static __CASES : [Case ; 256] = [Case :: Case20 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case1 , Case :: Case2 , Case :: Case29 , Case :: Case2 , Case :: Case2 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case1 , Case :: Case29 , Case :: Case3 , Case :: Case4 , Case :: Case5 , Case :: Case29 , Case :: Case29 , Case :: Case6 , Case :: Case7 , Case :: Case8 , Case :: Case9 , Case :: Case10 , Case :: Case11 , Case :: Case12 , Case :: Case13 , Case :: Case14 , Case :: Case15 , Case :: Case15 , Case :: Case15 , Case :: Case15 , Case :: Case15 , Case :: Case15 , Case :: Case15 , Case :: Case15 , Case :: Case15 , Case :: Case15 , Case :: Case16 , Case :: Case17 , Case :: Case18 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case19 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case21 , Case :: Case22 , Case :: Case23 , Case :: Case24 , Case :: Case20 , Case :: Case29 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case20 , Case :: Case25 , Case :: Case26 , Case :: Case27 , Case :: Case28 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: 
Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29 , Case :: Case29] ; match __CASES [b as usize] { Case :: Case1 => { { consume_whitespace (tokenizer , false) } } , Case :: Case2 => { { consume_whitespace (tokenizer , true) } } , Case :: Case3 => { { consume_string (tokenizer , false) } } , Case :: Case4 => { { tokenizer . advance (1) ; if is_ident_start (tokenizer) { IDHash (consume_name (tokenizer)) } else if ! tokenizer . is_eof () && match tokenizer . next_byte_unchecked () { b'0' ..= b'9' | b'-' => true , _ => false , } { Hash (consume_name (tokenizer)) } else { Delim ('#') } } } , Case :: Case5 => { { if tokenizer . starts_with (b"$=") { tokenizer . advance (2) ; SuffixMatch } else { tokenizer . advance (1) ; Delim ('$') } } } , Case :: Case6 => { { consume_string (tokenizer , true) } } , Case :: Case7 => { { tokenizer . advance (1) ; ParenthesisBlock } } , Case :: Case8 => { { tokenizer . advance (1) ; CloseParenthesis } } , Case :: Case9 => { { if tokenizer . starts_with (b"*=") { tokenizer . advance (2) ; SubstringMatch } else { tokenizer . advance (1) ; Delim ('*') } } } , Case :: Case10 => { { if (tokenizer . has_at_least (1) && matches ! (tokenizer . byte_at (1) , b'0' ..= b'9')) || (tokenizer . has_at_least (2) && tokenizer . byte_at (1) == b'.' && matches ! (tokenizer . byte_at (2) , b'0' ..= b'9')) { consume_numeric (tokenizer) } else { tokenizer . advance (1) ; Delim ('+') } } } , Case :: Case11 => { { tokenizer . advance (1) ; Comma } } , Case :: Case12 => { { if (tokenizer . has_at_least (1) && matches ! (tokenizer . byte_at (1) , b'0' ..= b'9')) || (tokenizer . has_at_least (2) && tokenizer . byte_at (1) == b'.' && matches ! (tokenizer . byte_at (2) , b'0' ..= b'9')) { consume_numeric (tokenizer) } else if tokenizer . starts_with (b"-->") { tokenizer . advance (3) ; CDC } else if is_ident_start (tokenizer) { consume_ident_like (tokenizer) } else { tokenizer . advance (1) ; Delim ('-') } } } , Case :: Case13 => { { if tokenizer . has_at_least (1) && matches ! (tokenizer . byte_at (1) , b'0' ..= b'9') { consume_numeric (tokenizer) } else { tokenizer . advance (1) ; Delim ('.') } } } , Case :: Case14 => { { if tokenizer . 
starts_with (b"/*") { Comment (consume_comment (tokenizer)) } else { tokenizer . advance (1) ; Delim ('/') } } } , Case :: Case15 => { { consume_numeric (tokenizer) } } , Case :: Case16 => { { tokenizer . advance (1) ; Colon } } , Case :: Case17 => { { tokenizer . advance (1) ; Semicolon } } , Case :: Case18 => { { if tokenizer . starts_with (b"