use crate::cow_rc_str::CowRcStr;
use crate::tokenizer::{SourceLocation, SourcePosition, Token, Tokenizer};
use smallvec::SmallVec;
use std::fmt;
use std::ops::BitOr;
use std::ops::Range;

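/// A capture of a `Parser`'s position and line information, obtained from
/// `Parser::state` and restored with `Parser::reset`.
///
/// A rough usage sketch (illustrative only, not compiled as a doctest; assumes
/// a `Parser` named `parser` is in scope):
///
/// ```ignore
/// let start = parser.state();
/// if parser.expect_comma().is_err() {
///     // Rewind to where we were before looking for the comma.
///     parser.reset(&start);
/// }
/// ```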
#[derive(Debug, Clone)]
pub struct ParserState {
    pub(crate) position: usize,
    pub(crate) current_line_start_position: usize,
    pub(crate) current_line_number: u32,
    pub(crate) at_start_of: Option<BlockType>,
}

impl ParserState {
    #[inline]
    pub fn position(&self) -> SourcePosition {
        SourcePosition(self.position)
    }

    #[inline]
    pub fn source_location(&self) -> SourceLocation {
        SourceLocation {
            line: self.current_line_number,
            column: (self.position - self.current_line_start_position + 1) as u32,
        }
    }
}

/// Whether `parse_until_*` should keep consuming input up to the delimiter
/// even when the inner parse function fails.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum ParseUntilErrorBehavior {
    /// Consume input up to the delimiter regardless of the result.
    Consume,
    /// Stop consuming input as soon as the inner parse function returns an error.
    Stop,
}

/// The kinds of parse error that the built-in parsing routines can produce.
#[derive(Clone, Debug, PartialEq)]
pub enum BasicParseErrorKind<'i> {
    /// An unexpected token was encountered.
    UnexpectedToken(Token<'i>),
    /// The end of the input was encountered unexpectedly.
    EndOfInput,
    /// An `@` rule with the given name was invalid.
    AtRuleInvalid(CowRcStr<'i>),
    /// The body of an `@` rule was invalid.
    AtRuleBodyInvalid,
    /// A qualified rule was invalid.
    QualifiedRuleInvalid,
}

impl fmt::Display for BasicParseErrorKind<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            BasicParseErrorKind::UnexpectedToken(token) => {
                write!(f, "unexpected token: {:?}", token)
            }
            BasicParseErrorKind::EndOfInput => write!(f, "unexpected end of input"),
            BasicParseErrorKind::AtRuleInvalid(rule) => {
                write!(f, "invalid @ rule encountered: '@{}'", rule)
            }
            BasicParseErrorKind::AtRuleBodyInvalid => write!(f, "invalid @ rule body encountered"),
            BasicParseErrorKind::QualifiedRuleInvalid => {
                write!(f, "invalid qualified rule encountered")
            }
        }
    }
}

/// A parse error produced by the built-in parsing routines, with its location.
#[derive(Clone, Debug, PartialEq)]
pub struct BasicParseError<'i> {
    /// The kind of this error.
    pub kind: BasicParseErrorKind<'i>,
    /// The location where this error occurred.
    pub location: SourceLocation,
}

impl<'i, T> From<BasicParseError<'i>> for ParseError<'i, T> {
    #[inline]
    fn from(this: BasicParseError<'i>) -> ParseError<'i, T> {
        ParseError {
            kind: ParseErrorKind::Basic(this.kind),
            location: this.location,
        }
    }
}

impl SourceLocation {
    /// Create a new `BasicParseError` of kind `UnexpectedToken` at this location.
    #[inline]
    pub fn new_basic_unexpected_token_error(self, token: Token<'_>) -> BasicParseError<'_> {
        self.new_basic_error(BasicParseErrorKind::UnexpectedToken(token))
    }

    /// Create a new `BasicParseError` of the given kind at this location.
    #[inline]
    pub fn new_basic_error(self, kind: BasicParseErrorKind<'_>) -> BasicParseError<'_> {
        BasicParseError {
            kind,
            location: self,
        }
    }

    /// Create a new `ParseError` of kind `UnexpectedToken` at this location.
    #[inline]
    pub fn new_unexpected_token_error<E>(self, token: Token<'_>) -> ParseError<'_, E> {
        self.new_error(BasicParseErrorKind::UnexpectedToken(token))
    }

    /// Create a new `ParseError` with the given basic kind at this location.
    #[inline]
    pub fn new_error<E>(self, kind: BasicParseErrorKind<'_>) -> ParseError<'_, E> {
        ParseError {
            kind: ParseErrorKind::Basic(kind),
            location: self,
        }
    }

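    /// Create a new `ParseError` of the `Custom` kind at this location.
    ///
    /// A rough sketch (illustrative, not compiled as a doctest; `MyError` is a
    /// hypothetical downstream error type):
    ///
    /// ```ignore
    /// #[derive(Clone, Debug, PartialEq)]
    /// enum MyError { UnknownProperty }
    ///
    /// let location = parser.current_source_location();
    /// let error: ParseError<MyError> = location.new_custom_error(MyError::UnknownProperty);
    /// ```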
    #[inline]
    pub fn new_custom_error<'i, E1: Into<E2>, E2>(self, error: E1) -> ParseError<'i, E2> {
        ParseError {
            kind: ParseErrorKind::Custom(error.into()),
            location: self,
        }
    }
}

/// Details of a `ParseError`: either a built-in error or a user-defined one.
#[derive(Clone, Debug, PartialEq)]
pub enum ParseErrorKind<'i, T: 'i> {
    /// A fundamental parse error from a built-in parsing routine.
    Basic(BasicParseErrorKind<'i>),
    /// A parse error reported by downstream consumer code.
    Custom(T),
}

impl<'i, T> ParseErrorKind<'i, T> {
    pub fn into<U>(self) -> ParseErrorKind<'i, U>
    where
        T: Into<U>,
    {
        match self {
            ParseErrorKind::Basic(basic) => ParseErrorKind::Basic(basic),
            ParseErrorKind::Custom(custom) => ParseErrorKind::Custom(custom.into()),
        }
    }
}

impl<E: fmt::Display> fmt::Display for ParseErrorKind<'_, E> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            ParseErrorKind::Basic(ref basic) => basic.fmt(f),
            ParseErrorKind::Custom(ref custom) => custom.fmt(f),
        }
    }
}

/// An extensible parse error, carrying either a built-in kind or a custom one,
/// plus the location where it occurred.
#[derive(Clone, Debug, PartialEq)]
pub struct ParseError<'i, E> {
    /// The kind of this error.
    pub kind: ParseErrorKind<'i, E>,
    /// The location where this error occurred.
    pub location: SourceLocation,
}

impl<'i, T> ParseError<'i, T> {
    /// Extract the fundamental (basic) parse error, panicking if this is a
    /// `Custom` error.
    pub fn basic(self) -> BasicParseError<'i> {
        match self.kind {
            ParseErrorKind::Basic(kind) => BasicParseError {
                kind,
                location: self.location,
            },
            ParseErrorKind::Custom(_) => panic!("Not a basic parse error"),
        }
    }

    /// Convert the custom error payload with `Into`, keeping the location.
    pub fn into<U>(self) -> ParseError<'i, U>
    where
        T: Into<U>,
    {
        ParseError {
            kind: self.kind.into(),
            location: self.location,
        }
    }
}

impl<E: fmt::Display> fmt::Display for ParseError<'_, E> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.kind.fmt(f)
    }
}

impl<E: fmt::Display + fmt::Debug> std::error::Error for ParseError<'_, E> {}

/// The owned input for a parser. Create one with `ParserInput::new`, then hand
/// a mutable borrow of it to `Parser::new`.
pub struct ParserInput<'i> {
    tokenizer: Tokenizer<'i>,
    cached_token: Option<CachedToken<'i>>,
}

/// The most recently tokenized token, kept so that rewinding with
/// `Parser::reset` and re-reading it does not re-run the tokenizer.
struct CachedToken<'i> {
    token: Token<'i>,
    start_position: SourcePosition,
    end_state: ParserState,
}

impl<'i> ParserInput<'i> {
    /// Create a new input for a parser from a string.
    pub fn new(input: &'i str) -> ParserInput<'i> {
        ParserInput {
            tokenizer: Tokenizer::new(input),
            cached_token: None,
        }
    }

    #[inline]
    fn cached_token_ref(&self) -> &Token<'i> {
        &self.cached_token.as_ref().unwrap().token
    }
}

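/// A CSS parser over a `ParserInput`, with separate lifetimes for the input
/// string (`'i`) and for the borrow of the `ParserInput` (`'t`).
///
/// A rough end-to-end sketch (illustrative, not compiled as a doctest; assumes
/// the crate is used under the name `cssparser`):
///
/// ```ignore
/// use cssparser::{Parser, ParserInput};
///
/// let mut input = ParserInput::new("foo: 12px");
/// let mut parser = Parser::new(&mut input);
/// let name = parser.expect_ident_cloned().unwrap();
/// parser.expect_colon().unwrap();
/// // `next()` skips whitespace, so this returns the dimension token for `12px`.
/// let value = parser.next().unwrap().clone();
/// ```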
pub struct Parser<'i, 't> {
    input: &'t mut ParserInput<'i>,
    /// If `Some(_)`, the parser is just past the opening token of a block that
    /// has not been entered yet; that block is skipped before the next token
    /// is returned.
    at_start_of: Option<BlockType>,
    /// Delimiters at which this (possibly nested or delimited) parser stops.
    stop_before: Delimiters,
}

#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub(crate) enum BlockType {
    Parenthesis,
    SquareBracket,
    CurlyBracket,
}

impl BlockType {
    fn opening(token: &Token) -> Option<BlockType> {
        match *token {
            Token::Function(_) | Token::ParenthesisBlock => Some(BlockType::Parenthesis),
            Token::SquareBracketBlock => Some(BlockType::SquareBracket),
            Token::CurlyBracketBlock => Some(BlockType::CurlyBracket),
            _ => None,
        }
    }

    fn closing(token: &Token) -> Option<BlockType> {
        match *token {
            Token::CloseParenthesis => Some(BlockType::Parenthesis),
            Token::CloseSquareBracket => Some(BlockType::SquareBracket),
            Token::CloseCurlyBracket => Some(BlockType::CurlyBracket),
            _ => None,
        }
    }
}

/// A set of characters at which a delimited `Parser` stops, represented as bit flags.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct Delimiters {
    bits: u8,
}

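/// Constant `Delimiters` values for the individual delimiters; combine them
/// with `|` (see the `BitOr` impl below).
///
/// A rough sketch of using a combined delimiter set with
/// `Parser::parse_until_before` (illustrative, not compiled as a doctest):
///
/// ```ignore
/// let stop_at = Delimiter::Semicolon | Delimiter::Bang;
/// let value: Result<_, ParseError<()>> = parser.parse_until_before(stop_at, |parser| {
///     // The delimited parser stops before `;` or `!`.
///     parser.expect_ident_cloned().map_err(Into::into)
/// });
/// ```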
#[allow(non_upper_case_globals, non_snake_case)]
pub mod Delimiter {
    use super::Delimiters;

    /// The empty delimiter set.
    pub const None: Delimiters = Delimiters { bits: 0 };
    /// The delimiter `{`.
    pub const CurlyBracketBlock: Delimiters = Delimiters { bits: 1 << 1 };
    /// The delimiter `;`.
    pub const Semicolon: Delimiters = Delimiters { bits: 1 << 2 };
    /// The delimiter `!`.
    pub const Bang: Delimiters = Delimiters { bits: 1 << 3 };
    /// The delimiter `,`.
    pub const Comma: Delimiters = Delimiters { bits: 1 << 4 };
}

#[allow(non_upper_case_globals, non_snake_case)]
mod ClosingDelimiter {
    use super::Delimiters;

    pub const CloseCurlyBracket: Delimiters = Delimiters { bits: 1 << 5 };
    pub const CloseSquareBracket: Delimiters = Delimiters { bits: 1 << 6 };
    pub const CloseParenthesis: Delimiters = Delimiters { bits: 1 << 7 };
}

impl BitOr<Delimiters> for Delimiters {
    type Output = Delimiters;

    #[inline]
    fn bitor(self, other: Delimiters) -> Delimiters {
        Delimiters {
            bits: self.bits | other.bits,
        }
    }
}

impl Delimiters {
    #[inline]
    fn contains(self, other: Delimiters) -> bool {
        (self.bits & other.bits) != 0
    }

    #[inline]
    pub(crate) fn from_byte(byte: Option<u8>) -> Delimiters {
        // Byte-indexed lookup table mapping delimiter bytes to their flags;
        // every other byte maps to the empty set.
        const TABLE: [Delimiters; 256] = {
            let mut table = [Delimiter::None; 256];
            table[b';' as usize] = Delimiter::Semicolon;
            table[b'!' as usize] = Delimiter::Bang;
            table[b',' as usize] = Delimiter::Comma;
            table[b'{' as usize] = Delimiter::CurlyBracketBlock;
            table[b'}' as usize] = ClosingDelimiter::CloseCurlyBracket;
            table[b']' as usize] = ClosingDelimiter::CloseSquareBracket;
            table[b')' as usize] = ClosingDelimiter::CloseParenthesis;
            table
        };

        assert_eq!(TABLE[0], Delimiter::None);
        TABLE[byte.unwrap_or(0) as usize]
    }
}

// Shared body of the `expect_*` methods below: match the next token against the
// given arms, or return an `UnexpectedToken` error at the token's start location.
macro_rules! expect {
    ($parser: ident, $($branches: tt)+) => {
        {
            let start_location = $parser.current_source_location();
            match *$parser.next()? {
                $($branches)+
                ref token => {
                    return Err(start_location.new_basic_unexpected_token_error(token.clone()))
                }
            }
        }
    }
}

impl<'i: 't, 't> Parser<'i, 't> {
    /// Create a new parser for the given input.
    #[inline]
    pub fn new(input: &'t mut ParserInput<'i>) -> Parser<'i, 't> {
        Parser {
            input,
            at_start_of: None,
            stop_before: Delimiter::None,
        }
    }

    /// Return the current line that is being parsed.
    pub fn current_line(&self) -> &'i str {
        self.input.tokenizer.current_source_line()
    }

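    /// Check whether the remaining input is exhausted, i.e. whether `next()`
    /// would return an `EndOfInput` error.
    ///
    /// A rough sketch (illustrative, not compiled as a doctest):
    ///
    /// ```ignore
    /// let mut input = ParserInput::new("foo");
    /// let mut parser = Parser::new(&mut input);
    /// assert!(!parser.is_exhausted());
    /// assert!(parser.expect_ident().is_ok());
    /// assert!(parser.is_exhausted());
    /// ```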
    #[inline]
    pub fn is_exhausted(&mut self) -> bool {
        self.expect_exhausted().is_ok()
    }

    /// Return `Ok(())` if the input is exhausted (i.e. `next()` would fail
    /// with `EndOfInput`), without consuming any input.
    #[inline]
    pub fn expect_exhausted(&mut self) -> Result<(), BasicParseError<'i>> {
        let start = self.state();
        let result = match self.next() {
            Err(BasicParseError {
                kind: BasicParseErrorKind::EndOfInput,
                ..
            }) => Ok(()),
            Err(e) => unreachable!("Unexpected error encountered: {:?}", e),
            Ok(t) => Err(start
                .source_location()
                .new_basic_unexpected_token_error(t.clone())),
        };
        self.reset(&start);
        result
    }

    /// The current position, counted in bytes from the start of the input.
    #[inline]
    pub fn position(&self) -> SourcePosition {
        self.input.tokenizer.position()
    }

    #[inline]
    pub fn current_source_location(&self) -> SourceLocation {
        self.input.tokenizer.current_source_location()
    }

    pub fn current_source_map_url(&self) -> Option<&str> {
        self.input.tokenizer.current_source_map_url()
    }

    pub fn current_source_url(&self) -> Option<&str> {
        self.input.tokenizer.current_source_url()
    }

    #[inline]
    pub fn new_basic_error(&self, kind: BasicParseErrorKind<'i>) -> BasicParseError<'i> {
        self.current_source_location().new_basic_error(kind)
    }

    #[inline]
    pub fn new_error<E>(&self, kind: BasicParseErrorKind<'i>) -> ParseError<'i, E> {
        self.current_source_location().new_error(kind)
    }

    #[inline]
    pub fn new_custom_error<E1: Into<E2>, E2>(&self, error: E1) -> ParseError<'i, E2> {
        self.current_source_location().new_custom_error(error)
    }

    #[inline]
    pub fn new_basic_unexpected_token_error(&self, token: Token<'i>) -> BasicParseError<'i> {
        self.new_basic_error(BasicParseErrorKind::UnexpectedToken(token))
    }

    #[inline]
    pub fn new_unexpected_token_error<E>(&self, token: Token<'i>) -> ParseError<'i, E> {
        self.new_error(BasicParseErrorKind::UnexpectedToken(token))
    }

    /// Create an `UnexpectedToken` error for whatever token comes next, or
    /// propagate the failure if there is no next token.
    #[inline]
    pub fn new_error_for_next_token<E>(&mut self) -> ParseError<'i, E> {
        let token = match self.next() {
            Ok(token) => token.clone(),
            Err(e) => return e.into(),
        };
        self.new_error(BasicParseErrorKind::UnexpectedToken(token))
    }

    /// Capture the current state of the parser, to be restored later with `reset`.
    #[inline]
    pub fn state(&self) -> ParserState {
        ParserState {
            at_start_of: self.at_start_of,
            ..self.input.tokenizer.state()
        }
    }

    #[inline]
    pub fn skip_whitespace(&mut self) {
        if let Some(block_type) = self.at_start_of.take() {
            consume_until_end_of_block(block_type, &mut self.input.tokenizer);
        }

        self.input.tokenizer.skip_whitespace()
    }

    #[inline]
    pub(crate) fn skip_cdc_and_cdo(&mut self) {
        if let Some(block_type) = self.at_start_of.take() {
            consume_until_end_of_block(block_type, &mut self.input.tokenizer);
        }

        self.input.tokenizer.skip_cdc_and_cdo()
    }

    #[inline]
    pub(crate) fn next_byte(&self) -> Option<u8> {
        let byte = self.input.tokenizer.next_byte();
        if self.stop_before.contains(Delimiters::from_byte(byte)) {
            return None;
        }
        byte
    }

    /// Restore the parser to a state previously captured with `state`.
    #[inline]
    pub fn reset(&mut self, state: &ParserState) {
        self.input.tokenizer.reset(state);
        self.at_start_of = state.at_start_of;
    }

    #[inline]
    pub fn look_for_var_or_env_functions(&mut self) {
        self.input.tokenizer.look_for_var_or_env_functions()
    }

    #[inline]
    pub fn seen_var_or_env_functions(&mut self) -> bool {
        self.input.tokenizer.seen_var_or_env_functions()
    }

    /// An alias for `try_parse`.
    #[inline]
    pub fn r#try<F, T, E>(&mut self, thing: F) -> Result<T, E>
    where
        F: FnOnce(&mut Parser<'i, 't>) -> Result<T, E>,
    {
        self.try_parse(thing)
    }

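    /// Run the given closure; if it returns an error, rewind the parser to the
    /// state it was in before the call.
    ///
    /// A rough sketch of speculative parsing (illustrative, not compiled as a
    /// doctest):
    ///
    /// ```ignore
    /// // Try to parse an optional `!important` suffix; on failure the parser
    /// // is rewound, so the remaining input can be parsed another way.
    /// let important = parser
    ///     .try_parse(|parser| {
    ///         parser.expect_delim('!')?;
    ///         parser.expect_ident_matching("important")
    ///     })
    ///     .is_ok();
    /// ```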
    #[inline]
    pub fn try_parse<F, T, E>(&mut self, thing: F) -> Result<T, E>
    where
        F: FnOnce(&mut Parser<'i, 't>) -> Result<T, E>,
    {
        let start = self.state();
        let result = thing(self);
        if result.is_err() {
            self.reset(&start)
        }
        result
    }

    #[inline]
    pub fn slice(&self, range: Range<SourcePosition>) -> &'i str {
        self.input.tokenizer.slice(range)
    }

    #[inline]
    pub fn slice_from(&self, start_position: SourcePosition) -> &'i str {
        self.input.tokenizer.slice_from(start_position)
    }

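    /// Return the next token, skipping whitespace and comments.
    ///
    /// A rough sketch of walking the top-level tokens of an input
    /// (illustrative, not compiled as a doctest):
    ///
    /// ```ignore
    /// let mut input = ParserInput::new("a { b: c }");
    /// let mut parser = Parser::new(&mut input);
    /// while let Ok(token) = parser.next() {
    ///     println!("{:?}", token);
    /// }
    /// ```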
    #[allow(clippy::should_implement_trait)]
    pub fn next(&mut self) -> Result<&Token<'i>, BasicParseError<'i>> {
        self.skip_whitespace();
        self.next_including_whitespace_and_comments()
    }

    /// Like `next`, but does not skip whitespace (comments are still skipped).
    pub fn next_including_whitespace(&mut self) -> Result<&Token<'i>, BasicParseError<'i>> {
        loop {
            match self.next_including_whitespace_and_comments() {
                Err(e) => return Err(e),
                Ok(&Token::Comment(_)) => {}
                _ => break,
            }
        }
        Ok(self.input.cached_token_ref())
    }

    /// Like `next`, but returns whitespace and comment tokens as well.
    pub fn next_including_whitespace_and_comments(
        &mut self,
    ) -> Result<&Token<'i>, BasicParseError<'i>> {
        if let Some(block_type) = self.at_start_of.take() {
            consume_until_end_of_block(block_type, &mut self.input.tokenizer);
        }

        let byte = self.input.tokenizer.next_byte();
        if self.stop_before.contains(Delimiters::from_byte(byte)) {
            return Err(self.new_basic_error(BasicParseErrorKind::EndOfInput));
        }

        let token_start_position = self.input.tokenizer.position();
        let using_cached_token = self
            .input
            .cached_token
            .as_ref()
            .map_or(false, |cached_token| {
                cached_token.start_position == token_start_position
            });
        let token = if using_cached_token {
            // A token was already produced from this exact position (e.g. after
            // a `reset`): reuse it instead of running the tokenizer again.
            let cached_token = self.input.cached_token.as_ref().unwrap();
            self.input.tokenizer.reset(&cached_token.end_state);
            if let Token::Function(ref name) = cached_token.token {
                self.input.tokenizer.see_function(name)
            }
            &cached_token.token
        } else {
            let new_token = self
                .input
                .tokenizer
                .next()
                .map_err(|()| self.new_basic_error(BasicParseErrorKind::EndOfInput))?;
            self.input.cached_token = Some(CachedToken {
                token: new_token,
                start_position: token_start_position,
                end_state: self.input.tokenizer.state(),
            });
            self.input.cached_token_ref()
        };

        if let Some(block_type) = BlockType::opening(token) {
            self.at_start_of = Some(block_type);
        }
        Ok(token)
    }

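    /// Run the given closure, then check that the whole remaining input has
    /// been consumed; anything left over turns into an `UnexpectedToken` error.
    ///
    /// A rough sketch (illustrative, not compiled as a doctest): parse exactly
    /// one identifier and nothing else.
    ///
    /// ```ignore
    /// let result: Result<_, ParseError<()>> =
    ///     parser.parse_entirely(|parser| parser.expect_ident_cloned().map_err(Into::into));
    /// ```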
    #[inline]
    pub fn parse_entirely<F, T, E>(&mut self, parse: F) -> Result<T, ParseError<'i, E>>
    where
        F: FnOnce(&mut Parser<'i, 't>) -> Result<T, ParseError<'i, E>>,
    {
        let result = parse(self)?;
        self.expect_exhausted()?;
        Ok(result)
    }

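    /// Parse a comma-separated list of values, each parsed by `parse_one`.
    ///
    /// A rough sketch of parsing a list of numbers (illustrative, not compiled
    /// as a doctest):
    ///
    /// ```ignore
    /// let mut input = ParserInput::new("1, 2, 3");
    /// let mut parser = Parser::new(&mut input);
    /// let numbers: Result<Vec<f32>, ParseError<()>> =
    ///     parser.parse_comma_separated(|parser| parser.expect_number().map_err(Into::into));
    /// ```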
    #[inline]
    pub fn parse_comma_separated<F, T, E>(
        &mut self,
        parse_one: F,
    ) -> Result<Vec<T>, ParseError<'i, E>>
    where
        F: for<'tt> FnMut(&mut Parser<'i, 'tt>) -> Result<T, ParseError<'i, E>>,
    {
        self.parse_comma_separated_internal(parse_one, false)
    }

    /// Like `parse_comma_separated`, but errors returned by `parse_one` are
    /// ignored and the corresponding items are omitted from the result.
    #[inline]
    pub fn parse_comma_separated_ignoring_errors<F, T, E: 'i>(&mut self, parse_one: F) -> Vec<T>
    where
        F: for<'tt> FnMut(&mut Parser<'i, 'tt>) -> Result<T, ParseError<'i, E>>,
    {
        match self.parse_comma_separated_internal(parse_one, true) {
            Ok(values) => values,
            Err(..) => unreachable!(),
        }
    }

    #[inline]
    fn parse_comma_separated_internal<F, T, E>(
        &mut self,
        mut parse_one: F,
        ignore_errors: bool,
    ) -> Result<Vec<T>, ParseError<'i, E>>
    where
        F: for<'tt> FnMut(&mut Parser<'i, 'tt>) -> Result<T, ParseError<'i, E>>,
    {
        let mut values = Vec::with_capacity(1);
        loop {
            // Skip leading whitespace before handing the item to `parse_one`.
            self.skip_whitespace();
            match self.parse_until_before(Delimiter::Comma, &mut parse_one) {
                Ok(v) => values.push(v),
                Err(e) if !ignore_errors => return Err(e),
                Err(_) => {}
            }
            match self.next() {
                // End of input (or an enclosing delimiter): the list is complete.
                Err(_) => return Ok(values),
                Ok(&Token::Comma) => continue,
                // `parse_until_before` stopped before a comma, so no other
                // token can appear here.
                Ok(_) => unreachable!(),
            }
        }
    }

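    /// Parse the body of a block or function whose opening token was just
    /// consumed, stopping at the matching closing token.
    ///
    /// A rough sketch of parsing the arguments of a function token
    /// (illustrative, not compiled as a doctest):
    ///
    /// ```ignore
    /// // For input like `rgb(255, 0, 0)`: consume the `rgb(` function token
    /// // first, then parse its arguments; the nested parser stops at `)`.
    /// parser.expect_function_matching("rgb").unwrap();
    /// let arguments: Result<Vec<f32>, ParseError<()>> = parser.parse_nested_block(|parser| {
    ///     parser.parse_comma_separated(|parser| parser.expect_number().map_err(Into::into))
    /// });
    /// ```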
    #[inline]
    pub fn parse_nested_block<F, T, E>(&mut self, parse: F) -> Result<T, ParseError<'i, E>>
    where
        F: for<'tt> FnOnce(&mut Parser<'i, 'tt>) -> Result<T, ParseError<'i, E>>,
    {
        parse_nested_block(self, parse)
    }

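    /// Limit parsing to the input up to (but not including) the first
    /// occurrence of any of the given delimiters, and run the closure on it.
    ///
    /// A rough sketch of parsing a declaration value that stops before
    /// `!important` or the end of the declaration (illustrative, not compiled
    /// as a doctest):
    ///
    /// ```ignore
    /// let value: Result<_, ParseError<()>> = parser.parse_until_before(
    ///     Delimiter::Bang | Delimiter::Semicolon,
    ///     |parser| parser.expect_ident_cloned().map_err(Into::into),
    /// );
    /// ```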
    #[inline]
    pub fn parse_until_before<F, T, E>(
        &mut self,
        delimiters: Delimiters,
        parse: F,
    ) -> Result<T, ParseError<'i, E>>
    where
        F: for<'tt> FnOnce(&mut Parser<'i, 'tt>) -> Result<T, ParseError<'i, E>>,
    {
        parse_until_before(self, delimiters, ParseUntilErrorBehavior::Consume, parse)
    }

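    /// Like `parse_until_before`, but also consume the delimiter itself (and,
    /// if the delimiter is `{`, the whole block it opens).
    ///
    /// A rough sketch (illustrative, not compiled as a doctest): skip the rest
    /// of a declaration, including its terminating `;`.
    ///
    /// ```ignore
    /// let _: Result<(), ParseError<()>> =
    ///     parser.parse_until_after(Delimiter::Semicolon, |parser| {
    ///         while parser.next().is_ok() {}
    ///         Ok(())
    ///     });
    /// ```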
    #[inline]
    pub fn parse_until_after<F, T, E>(
        &mut self,
        delimiters: Delimiters,
        parse: F,
    ) -> Result<T, ParseError<'i, E>>
    where
        F: for<'tt> FnOnce(&mut Parser<'i, 'tt>) -> Result<T, ParseError<'i, E>>,
    {
        parse_until_after(self, delimiters, ParseUntilErrorBehavior::Consume, parse)
    }

    #[inline]
    pub fn expect_whitespace(&mut self) -> Result<&'i str, BasicParseError<'i>> {
        let start_location = self.current_source_location();
        match *self.next_including_whitespace()? {
            Token::WhiteSpace(value) => Ok(value),
            ref t => Err(start_location.new_basic_unexpected_token_error(t.clone())),
        }
    }

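    /// Parse an `<ident-token>` and return its value.
    ///
    /// A rough sketch (illustrative, not compiled as a doctest):
    ///
    /// ```ignore
    /// let mut input = ParserInput::new("auto");
    /// let mut parser = Parser::new(&mut input);
    /// let ident = parser.expect_ident().unwrap();
    /// assert_eq!(&**ident, "auto");
    /// ```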
    #[inline]
    pub fn expect_ident(&mut self) -> Result<&CowRcStr<'i>, BasicParseError<'i>> {
        expect! {self,
            Token::Ident(ref value) => Ok(value),
        }
    }

    #[inline]
    pub fn expect_ident_cloned(&mut self) -> Result<CowRcStr<'i>, BasicParseError<'i>> {
        self.expect_ident().cloned()
    }

    /// Parse an `<ident-token>` whose value is an ASCII-case-insensitive match
    /// for the given value.
    #[inline]
    pub fn expect_ident_matching(
        &mut self,
        expected_value: &str,
    ) -> Result<(), BasicParseError<'i>> {
        expect! {self,
            Token::Ident(ref value) if value.eq_ignore_ascii_case(expected_value) => Ok(()),
        }
    }

    #[inline]
    pub fn expect_string(&mut self) -> Result<&CowRcStr<'i>, BasicParseError<'i>> {
        expect! {self,
            Token::QuotedString(ref value) => Ok(value),
        }
    }

    #[inline]
    pub fn expect_string_cloned(&mut self) -> Result<CowRcStr<'i>, BasicParseError<'i>> {
        self.expect_string().cloned()
    }

    #[inline]
    pub fn expect_ident_or_string(&mut self) -> Result<&CowRcStr<'i>, BasicParseError<'i>> {
        expect! {self,
            Token::Ident(ref value) => Ok(value),
            Token::QuotedString(ref value) => Ok(value),
        }
    }

    /// Parse a `<url-token>`, or a `url(...)` function containing a quoted
    /// string, and return the URL value.
    #[inline]
    pub fn expect_url(&mut self) -> Result<CowRcStr<'i>, BasicParseError<'i>> {
        expect! {self,
            Token::UnquotedUrl(ref value) => Ok(value.clone()),
            Token::Function(ref name) if name.eq_ignore_ascii_case("url") => {
                self.parse_nested_block(|input| {
                    input.expect_string().map_err(Into::into).cloned()
                })
                .map_err(ParseError::<()>::basic)
            }
        }
    }

    #[inline]
    pub fn expect_url_or_string(&mut self) -> Result<CowRcStr<'i>, BasicParseError<'i>> {
        expect! {self,
            Token::UnquotedUrl(ref value) => Ok(value.clone()),
            Token::QuotedString(ref value) => Ok(value.clone()),
            Token::Function(ref name) if name.eq_ignore_ascii_case("url") => {
                self.parse_nested_block(|input| {
                    input.expect_string().map_err(Into::into).cloned()
                })
                .map_err(ParseError::<()>::basic)
            }
        }
    }

    #[inline]
    pub fn expect_number(&mut self) -> Result<f32, BasicParseError<'i>> {
        expect! {self,
            Token::Number { value, .. } => Ok(value),
        }
    }

    /// Parse a `<number-token>` that has an integer value.
    #[inline]
    pub fn expect_integer(&mut self) -> Result<i32, BasicParseError<'i>> {
        expect! {self,
            Token::Number { int_value: Some(int_value), .. } => Ok(int_value),
        }
    }

    #[inline]
    pub fn expect_percentage(&mut self) -> Result<f32, BasicParseError<'i>> {
        expect! {self,
            Token::Percentage { unit_value, .. } => Ok(unit_value),
        }
    }

    #[inline]
    pub fn expect_colon(&mut self) -> Result<(), BasicParseError<'i>> {
        expect! {self,
            Token::Colon => Ok(()),
        }
    }

    #[inline]
    pub fn expect_semicolon(&mut self) -> Result<(), BasicParseError<'i>> {
        expect! {self,
            Token::Semicolon => Ok(()),
        }
    }

    #[inline]
    pub fn expect_comma(&mut self) -> Result<(), BasicParseError<'i>> {
        expect! {self,
            Token::Comma => Ok(()),
        }
    }

    #[inline]
    pub fn expect_delim(&mut self, expected_value: char) -> Result<(), BasicParseError<'i>> {
        expect! {self,
            Token::Delim(value) if value == expected_value => Ok(()),
        }
    }

    #[inline]
    pub fn expect_curly_bracket_block(&mut self) -> Result<(), BasicParseError<'i>> {
        expect! {self,
            Token::CurlyBracketBlock => Ok(()),
        }
    }

    #[inline]
    pub fn expect_square_bracket_block(&mut self) -> Result<(), BasicParseError<'i>> {
        expect! {self,
            Token::SquareBracketBlock => Ok(()),
        }
    }

    #[inline]
    pub fn expect_parenthesis_block(&mut self) -> Result<(), BasicParseError<'i>> {
        expect! {self,
            Token::ParenthesisBlock => Ok(()),
        }
    }

    #[inline]
    pub fn expect_function(&mut self) -> Result<&CowRcStr<'i>, BasicParseError<'i>> {
        expect! {self,
            Token::Function(ref name) => Ok(name),
        }
    }

    /// Parse a function token whose name is an ASCII-case-insensitive match
    /// for the given name.
    #[inline]
    pub fn expect_function_matching(
        &mut self,
        expected_name: &str,
    ) -> Result<(), BasicParseError<'i>> {
        expect! {self,
            Token::Function(ref name) if name.eq_ignore_ascii_case(expected_name) => Ok(()),
        }
    }

    /// Consume the remaining input and return an error if a token that the
    /// tokenizer flags as a parse error (such as a bad string or bad URL) is
    /// encountered.
    #[inline]
    pub fn expect_no_error_token(&mut self) -> Result<(), BasicParseError<'i>> {
        loop {
            match self.next_including_whitespace_and_comments() {
                Ok(&Token::Function(_))
                | Ok(&Token::ParenthesisBlock)
                | Ok(&Token::SquareBracketBlock)
                | Ok(&Token::CurlyBracketBlock) => self
                    .parse_nested_block(|input| input.expect_no_error_token().map_err(Into::into))
                    .map_err(ParseError::<()>::basic)?,
                Ok(t) => {
                    if t.is_parse_error() {
                        let token = t.clone();
                        return Err(self.new_basic_unexpected_token_error(token));
                    }
                }
                Err(_) => return Ok(()),
            }
        }
    }
}

/// The backing implementation of `Parser::parse_until_before`: run `parse` on
/// a parser that stops before any of `delimiters`, then (unless
/// `error_behavior` is `Stop` and `parse` failed) consume the remaining input
/// up to, but not including, the first delimiter.
pub fn parse_until_before<'i: 't, 't, F, T, E>(
    parser: &mut Parser<'i, 't>,
    delimiters: Delimiters,
    error_behavior: ParseUntilErrorBehavior,
    parse: F,
) -> Result<T, ParseError<'i, E>>
where
    F: for<'tt> FnOnce(&mut Parser<'i, 'tt>) -> Result<T, ParseError<'i, E>>,
{
    let delimiters = parser.stop_before | delimiters;
    let result;
    {
        let mut delimited_parser = Parser {
            input: parser.input,
            at_start_of: parser.at_start_of.take(),
            stop_before: delimiters,
        };
        result = delimited_parser.parse_entirely(parse);
        if error_behavior == ParseUntilErrorBehavior::Stop && result.is_err() {
            return result;
        }
        if let Some(block_type) = delimited_parser.at_start_of {
            consume_until_end_of_block(block_type, &mut delimited_parser.input.tokenizer);
        }
    }
    // Consume whatever is left up to (but not including) the delimiter,
    // skipping nested blocks as opaque units.
    loop {
        if delimiters.contains(Delimiters::from_byte(parser.input.tokenizer.next_byte())) {
            break;
        }
        if let Ok(token) = parser.input.tokenizer.next() {
            if let Some(block_type) = BlockType::opening(&token) {
                consume_until_end_of_block(block_type, &mut parser.input.tokenizer);
            }
        } else {
            break;
        }
    }
    result
}

/// The backing implementation of `Parser::parse_until_after`: like
/// `parse_until_before`, but also consume the delimiter itself (and, for `{`,
/// the block it opens).
pub fn parse_until_after<'i: 't, 't, F, T, E>(
    parser: &mut Parser<'i, 't>,
    delimiters: Delimiters,
    error_behavior: ParseUntilErrorBehavior,
    parse: F,
) -> Result<T, ParseError<'i, E>>
where
    F: for<'tt> FnOnce(&mut Parser<'i, 'tt>) -> Result<T, ParseError<'i, E>>,
{
    let result = parse_until_before(parser, delimiters, error_behavior, parse);
    if error_behavior == ParseUntilErrorBehavior::Stop && result.is_err() {
        return result;
    }
    let next_byte = parser.input.tokenizer.next_byte();
    if next_byte.is_some()
        && !parser
            .stop_before
            .contains(Delimiters::from_byte(next_byte))
    {
        debug_assert!(delimiters.contains(Delimiters::from_byte(next_byte)));
        // All delimiter bytes are ASCII, so advancing one byte consumes the
        // delimiter itself.
        parser.input.tokenizer.advance(1);
        if next_byte == Some(b'{') {
            consume_until_end_of_block(BlockType::CurlyBracket, &mut parser.input.tokenizer);
        }
    }
    result
}

/// The backing implementation of `Parser::parse_nested_block`: run `parse` on
/// the contents of the block or function whose opening token was just
/// consumed, then consume the rest of the block including its closing token.
pub fn parse_nested_block<'i: 't, 't, F, T, E>(
    parser: &mut Parser<'i, 't>,
    parse: F,
) -> Result<T, ParseError<'i, E>>
where
    F: for<'tt> FnOnce(&mut Parser<'i, 'tt>) -> Result<T, ParseError<'i, E>>,
{
    let block_type = parser.at_start_of.take().expect(
        "\
         A nested parser can only be created when a Function, \
         ParenthesisBlock, SquareBracketBlock, or CurlyBracketBlock \
         token was just consumed.\
         ",
    );
    let closing_delimiter = match block_type {
        BlockType::CurlyBracket => ClosingDelimiter::CloseCurlyBracket,
        BlockType::SquareBracket => ClosingDelimiter::CloseSquareBracket,
        BlockType::Parenthesis => ClosingDelimiter::CloseParenthesis,
    };
    let result;
    {
        let mut nested_parser = Parser {
            input: parser.input,
            at_start_of: None,
            stop_before: closing_delimiter,
        };
        result = nested_parser.parse_entirely(parse);
        if let Some(block_type) = nested_parser.at_start_of {
            consume_until_end_of_block(block_type, &mut nested_parser.input.tokenizer);
        }
    }
    consume_until_end_of_block(block_type, &mut parser.input.tokenizer);
    result
}

/// Consume tokens until the block opened by `block_type`, and any blocks
/// nested inside it, are closed (or the input ends).
#[inline(never)]
#[cold]
fn consume_until_end_of_block(block_type: BlockType, tokenizer: &mut Tokenizer) {
    let mut stack = SmallVec::<[BlockType; 16]>::new();
    stack.push(block_type);

    while let Ok(ref token) = tokenizer.next() {
        if let Some(b) = BlockType::closing(token) {
            if *stack.last().unwrap() == b {
                stack.pop();
                if stack.is_empty() {
                    return;
                }
            }
        }

        if let Some(block_type) = BlockType::opening(token) {
            stack.push(block_type);
        }
    }
}