1use stylex_macros::stylex_unreachable;
18
19use crate::{
20 CssParseError,
21 token_types::{SimpleToken, TokenList},
22};
23use std::fmt::{Debug, Display};
24use std::rc::Rc;
25
26pub mod tokens {
28 use super::*;
29
30 pub fn ident() -> TokenParser<SimpleToken> {
31 TokenParser::<SimpleToken>::token(SimpleToken::Ident(String::new()), Some("Ident"))
32 }
33
34 pub fn whitespace() -> TokenParser<SimpleToken> {
35 TokenParser::<SimpleToken>::token(SimpleToken::Whitespace, Some("Whitespace"))
36 }
37
38 pub fn comma() -> TokenParser<SimpleToken> {
39 TokenParser::<SimpleToken>::token(SimpleToken::Comma, Some("Comma"))
40 }
41
42 pub fn colon() -> TokenParser<SimpleToken> {
43 TokenParser::<SimpleToken>::token(SimpleToken::Colon, Some("Colon"))
44 }
45
46 pub fn semicolon() -> TokenParser<SimpleToken> {
47 TokenParser::<SimpleToken>::token(SimpleToken::Semicolon, Some("Semicolon"))
48 }
49
50 pub fn number() -> TokenParser<SimpleToken> {
51 TokenParser::<SimpleToken>::token(SimpleToken::Number(0.0), Some("Number"))
52 }
53
54 pub fn percentage() -> TokenParser<SimpleToken> {
55 TokenParser::<SimpleToken>::token(SimpleToken::Percentage(0.0), Some("Percentage"))
56 }
57
58 pub fn dimension() -> TokenParser<SimpleToken> {
59 TokenParser::<SimpleToken>::token(
60 SimpleToken::Dimension {
61 value: 0.0,
62 unit: String::new(),
63 },
64 Some("Dimension"),
65 )
66 }
67
68 pub fn function() -> TokenParser<SimpleToken> {
69 TokenParser::<SimpleToken>::token(SimpleToken::Function(String::new()), Some("Function"))
70 }
71
72 pub fn string() -> TokenParser<SimpleToken> {
73 TokenParser::<SimpleToken>::token(SimpleToken::String(String::new()), Some("String"))
74 }
75
76 pub fn hash() -> TokenParser<SimpleToken> {
77 TokenParser::<SimpleToken>::token(SimpleToken::Hash(String::new()), Some("Hash"))
78 }
79
80 pub fn url() -> TokenParser<SimpleToken> {
81 TokenParser::<SimpleToken>::token(SimpleToken::Url(String::new()), Some("URL"))
82 }
83
84 pub fn open_paren() -> TokenParser<SimpleToken> {
85 TokenParser::<SimpleToken>::token(SimpleToken::LeftParen, Some("OpenParen"))
86 }
87
88 pub fn close_paren() -> TokenParser<SimpleToken> {
89 TokenParser::<SimpleToken>::token(SimpleToken::RightParen, Some("CloseParen"))
90 }
91
92 pub fn open_square() -> TokenParser<SimpleToken> {
93 TokenParser::<SimpleToken>::token(SimpleToken::LeftBracket, Some("OpenSquare"))
94 }
95
96 pub fn close_square() -> TokenParser<SimpleToken> {
97 TokenParser::<SimpleToken>::token(SimpleToken::RightBracket, Some("CloseSquare"))
98 }
99
100 pub fn open_curly() -> TokenParser<SimpleToken> {
101 TokenParser::<SimpleToken>::token(SimpleToken::LeftBrace, Some("OpenCurly"))
102 }
103
104 pub fn close_curly() -> TokenParser<SimpleToken> {
105 TokenParser::<SimpleToken>::token(SimpleToken::RightBrace, Some("CloseCurly"))
106 }
107
108 pub fn delim(ch: char) -> TokenParser<SimpleToken> {
109 TokenParser::<SimpleToken>::token(SimpleToken::Delim(ch), Some("Delim"))
110 }
111}
112
/// A value produced by one of two alternative parsers (see [`TokenParser::or`]).
#[derive(Clone, Debug, PartialEq)]
pub enum Either<T, U> {
  /// Result of the first (left-hand) alternative.
  Left(T),
  /// Result of the second (right-hand) alternative.
  Right(U),
}
118
/// The boxed parsing function shared by every [`TokenParser`]: consumes tokens
/// from a [`TokenList`] and yields a parsed value or a [`CssParseError`].
pub type RunFn<T> = Rc<dyn Fn(&mut TokenList) -> Result<T, CssParseError>>;

/// A composable parser over a [`TokenList`].
#[derive(Clone)]
pub struct TokenParser<T: Clone + Debug> {
  // The parsing closure; shared via `Rc` so cloning a parser is cheap.
  pub run: RunFn<T>,
  // Human-readable name used in error messages and `Display`.
  pub label: String,
}
127
128impl<T: Clone + Debug + 'static> TokenParser<T> {
  /// Wraps a raw parsing closure in a `TokenParser` with the given label.
  pub fn new<F>(parser_fn: F, label: &str) -> Self
  where
    F: Fn(&mut TokenList) -> Result<T, CssParseError> + 'static,
  {
    Self {
      run: Rc::new(parser_fn),
      label: label.to_string(),
    }
  }

  /// Tokenizes `css` and runs this parser once from the start.
  /// Trailing unconsumed tokens are NOT an error here — see
  /// [`parse_to_end`](Self::parse_to_end) for the strict variant.
  pub fn parse(&self, css: &str) -> Result<T, CssParseError> {
    let mut tokens = TokenList::new(css);
    (self.run)(&mut tokens)
  }
145
  /// Tokenizes `css`, runs this parser, and then requires that all input was
  /// consumed. Leftover tokens — or a parser failure — become a `ParseError`
  /// whose message includes the tokens consumed so far.
  pub fn parse_to_end(&self, css: &str) -> Result<T, CssParseError> {
    let mut tokens = TokenList::new(css);
    let initial_index = tokens.current_index;

    let output = (self.run)(&mut tokens);

    match output {
      Ok(value) => {
        // Success is only valid if nothing remains to be consumed.
        if let Some(token) = tokens.peek()? {
          let consumed_tokens = tokens.slice(initial_index, Some(tokens.current_index));
          return Err(CssParseError::ParseError {
            message: format!(
              "Expected end of input, got {:?} instead\nConsumed tokens: {:?}",
              token, consumed_tokens
            ),
          });
        }
        Ok(value)
      },
      Err(error) => {
        // Capture what was consumed before rewinding, then rewind so the
        // token list is left where the attempt started.
        let consumed_tokens = tokens.slice(initial_index, Some(tokens.current_index));
        tokens.set_current_index(initial_index);
        Err(CssParseError::ParseError {
          message: format!(
            "Expected {} but got {}\nConsumed tokens: {:?}",
            self.label, error, consumed_tokens
          ),
        })
      },
    }
  }
179
  /// Transforms this parser's output with `f`. On failure the token position
  /// is rolled back to where the attempt began.
  pub fn map<U, F>(&self, f: F, label: Option<&str>) -> TokenParser<U>
  where
    U: Clone + Debug + 'static,
    F: Fn(T) -> U + 'static,
  {
    let run_fn = self.run.clone();
    let new_label = format!("{}.map({})", self.label, label.unwrap_or(""));

    TokenParser::new(
      move |tokens| {
        let current_index = tokens.current_index;
        match (run_fn)(tokens) {
          Ok(value) => Ok(f(value)),
          Err(e) => {
            // Roll back any tokens consumed by the failed attempt.
            tokens.set_current_index(current_index);
            Err(e)
          },
        }
      },
      &new_label,
    )
  }

  /// Monadic bind: runs this parser, then runs the parser produced by `f`
  /// from its output. Rolls back to the starting position if either step
  /// fails.
  pub fn flat_map<U, F>(&self, f: F, label: Option<&str>) -> TokenParser<U>
  where
    U: Clone + Debug + 'static,
    F: Fn(T) -> TokenParser<U> + 'static,
  {
    let run_fn = self.run.clone();
    let new_label = format!("{}.flatMap({})", self.label, label.unwrap_or(""));

    TokenParser::new(
      move |tokens| {
        let current_index = tokens.current_index;

        let output1 = match (run_fn)(tokens) {
          Ok(value) => value,
          Err(e) => {
            tokens.set_current_index(current_index);
            return Err(e);
          },
        };

        let second_parser = f(output1);
        match (second_parser.run)(tokens) {
          Ok(output2) => Ok(output2),
          Err(e) => {
            // Undo the first parser's consumption as well, not just the second's.
            tokens.set_current_index(current_index);
            Err(e)
          },
        }
      },
      &new_label,
    )
  }
237
  /// Ordered choice: tries `self` first; on failure rewinds and tries
  /// `other`. The alternatives may produce different types, hence [`Either`].
  pub fn or<U>(&self, other: TokenParser<U>) -> TokenParser<Either<T, U>>
  where
    U: Clone + Debug + 'static,
  {
    let run_fn1 = self.run.clone();
    let run_fn2 = other.run.clone();
    // "optional" is the label `TokenParser::always` assigns for unit values;
    // special-case it for a friendlier combined label.
    let new_label = if other.label == "optional" {
      format!("Optional<{}>", self.label)
    } else {
      format!("OneOf<{}, {}>", self.label, other.label)
    };

    TokenParser::new(
      move |tokens| {
        let current_index = tokens.current_index;

        match (run_fn1)(tokens) {
          Ok(value) => Ok(Either::Left(value)),
          Err(_) => {
            // First alternative failed: rewind, then try the second.
            tokens.set_current_index(current_index);
            match (run_fn2)(tokens) {
              Ok(value) => Ok(Either::Right(value)),
              Err(e) => {
                tokens.set_current_index(current_index);
                Err(e)
              },
            }
          },
        }
      },
      &new_label,
    )
  }
272
  /// Keeps this parser's result only when `predicate` accepts it; otherwise
  /// the parse fails (implemented via `always`/`never` inside `flat_map`,
  /// which also rewinds the token position on failure).
  pub fn where_predicate<F>(&self, predicate: F, label: Option<&str>) -> TokenParser<T>
  where
    F: Fn(&T) -> bool + 'static,
  {
    let description = label.unwrap_or("");
    self.flat_map(
      move |output| {
        if predicate(&output) {
          TokenParser::always(output)
        } else {
          TokenParser::never()
        }
      },
      Some(description),
    )
  }

  /// Alias for [`where_predicate`](Self::where_predicate).
  pub fn where_fn<F>(&self, predicate: F, label: Option<&str>) -> TokenParser<T>
  where
    F: Fn(&T) -> bool + 'static,
  {
    self.where_predicate(predicate, label)
  }
297
  /// Wraps this parser between `prefix` and `suffix`, keeping only this
  /// parser's value. When `suffix` is `None`, the prefix parser is reused as
  /// the closing delimiter (e.g. matching quotes).
  pub fn surrounded_by<P, S>(
    &self,
    prefix: TokenParser<P>,
    suffix: Option<TokenParser<S>>,
  ) -> TokenParser<T>
  where
    P: Clone + Debug + 'static,
    S: Clone + Debug + 'static,
  {
    let main_parser = self.clone();
    match suffix {
      Some(suffix_parser) => {
        // prefix -> main -> suffix, discarding prefix/suffix results.
        prefix.flat_map(
          move |_| {
            let main = main_parser.clone();
            let suffix = suffix_parser.clone();
            main.flat_map(
              move |value| {
                // Clones are needed because the value is moved into the
                // suffix parser's mapping closure.
                let result_value = value.clone();
                suffix.map(move |_| result_value.clone(), None)
              },
              Some("surrounded_middle"),
            )
          },
          Some("surrounded_prefix"),
        )
      },
      None => {
        // Same structure, but the prefix parser also closes the group.
        let prefix_clone = prefix.clone();
        prefix.flat_map(
          move |_| {
            let main = main_parser.clone();
            let prefix_clone2 = prefix_clone.clone();
            main.flat_map(
              move |value| {
                let result_value = value.clone();
                prefix_clone2.map(move |_| result_value.clone(), None)
              },
              Some("surrounded_middle_same"),
            )
          },
          Some("surrounded_prefix_same"),
        )
      },
    }
  }
347
  /// Runs `skip_parser` after this parser and discards its result, keeping
  /// this parser's output.
  pub fn skip<U>(&self, skip_parser: TokenParser<U>) -> TokenParser<T>
  where
    U: Clone + Debug + 'static,
  {
    self.flat_map(
      move |output| {
        let output_clone = output.clone();
        skip_parser.map(move |_| output_clone.clone(), None)
      },
      Some("skip"),
    )
  }

  /// Requires `prefix_parser` to match first; its result is discarded.
  pub fn prefix<P>(&self, prefix_parser: TokenParser<P>) -> TokenParser<T>
  where
    P: Clone + Debug + 'static,
  {
    prefix_parser.flat_map(
      {
        let self_clone = self.clone();
        move |_| self_clone.clone()
      },
      Some("prefix"),
    )
  }

  /// Requires `suffix_parser` to match afterwards; its result is discarded.
  /// (Structurally identical to [`skip`](Self::skip), differing only in label.)
  pub fn suffix<S>(&self, suffix_parser: TokenParser<S>) -> TokenParser<T>
  where
    S: Clone + Debug + 'static,
  {
    self.flat_map(
      move |output| {
        let output_clone = output.clone();
        suffix_parser.map(move |_| output_clone.clone(), None)
      },
      Some("suffix"),
    )
  }
389
  /// Pairs this parser with a separator. Call `one_or_more`/`zero_or_more` on
  /// the returned builder to pick the repetition.
  pub fn separated_by<S: Clone + Debug + 'static>(
    &self,
    separator: TokenParser<S>,
  ) -> SeparatedParser<T, S> {
    SeparatedParser {
      parser: self.clone(),
      separator,
    }
  }

  /// Returns the parser's human-readable label.
  pub fn label(&self) -> &str {
    &self.label
  }
405
406 pub fn debug(&self, css: &str) -> Result<T, CssParseError> {
409 println!("š DEBUG: Parsing '{}' with parser '{}'", css, self.label);
410 let mut tokens = TokenList::new(css);
411 let result = (self.run)(&mut tokens);
412 match &result {
413 Ok(_value) => println!(
414 "ā
SUCCESS: Parser '{}' matched. Consumed {} tokens.",
415 self.label, tokens.current_index
416 ),
417 Err(error) => println!(
418 "ā FAILED: Parser '{}' failed at token {}. Error: {}",
419 self.label, tokens.current_index, error
420 ),
421 }
422 result
423 }
424
425 pub fn parse_with_context(&self, css: &str) -> Result<T, CssParseError> {
426 let mut tokens = TokenList::new(css);
427 let _initial_index = tokens.current_index;
428 let result = (self.run)(&mut tokens);
429
430 match result {
431 Err(error) => {
432 let context_tokens = peek_tokens(css, 5);
433 let remaining_css = &css[tokens.current_index.min(css.len())..];
434 Err(CssParseError::ParseError {
435 message: format!(
436 "{}\nš Context: Failed at position {} in '{}'\nš Next tokens: {:?}\nš Remaining: '{}'",
437 error,
438 tokens.current_index,
439 css,
440 context_tokens,
441 remaining_css.chars().take(20).collect::<String>()
442 ),
443 })
444 },
445 Ok(value) => Ok(value),
446 }
447 }
448
  /// Replaces the parser's label, returning the modified parser.
  pub fn with_label(mut self, new_label: &str) -> Self {
    self.label = new_label.to_string();
    self
  }

  /// A parser that consumes nothing and always succeeds with `value`.
  /// For unit values the label is the sentinel "optional", which
  /// [`or`](Self::or) recognizes when building combined labels.
  pub fn always(value: T) -> TokenParser<T> {
    let label = if std::any::type_name::<T>() == "()" {
      "optional".to_string()
    } else {
      format!("Always<{:?}>", value)
    };
    TokenParser::new(move |_| Ok(value.clone()), &label)
  }

  /// A parser that consumes nothing and always fails.
  pub fn never() -> TokenParser<T> {
    TokenParser::new(
      |_| {
        Err(CssParseError::ParseError {
          message: "Never".to_string(),
        })
      },
      "Never",
    )
  }

  /// One or more occurrences of `self`, with optional whitespace between them.
  pub fn separated_by_optional_whitespace(self) -> TokenParser<Vec<T>>
  where
    T: Clone + Debug + 'static,
  {
    self
      .separated_by(tokens::whitespace().optional())
      .one_or_more()
  }
485
  /// Tries each parser in order from the same starting position, returning
  /// the first success. If none match, fails with all collected errors.
  pub fn one_of(parsers: Vec<TokenParser<T>>) -> TokenParser<T> {
    TokenParser::new(
      move |tokens| {
        let mut errors = Vec::new();
        let index = tokens.current_index;

        for parser in &parsers {
          match (parser.run)(tokens) {
            Ok(output) => return Ok(output),
            Err(e) => {
              // Rewind so every alternative starts from the same position.
              tokens.set_current_index(index);
              errors.push(e);
            },
          }
        }

        Err(CssParseError::ParseError {
          message: format!(
            "No parser matched\n{}",
            errors
              .iter()
              .map(|err| format!("- {}", err))
              .collect::<Vec<_>>()
              .join("\n")
          ),
        })
      },
      "oneOf",
    )
  }

  /// Runs each parser in order, collecting all results. Fails (and rewinds to
  /// the start) if any parser fails.
  pub fn sequence<U: Clone + Debug + 'static>(parsers: Vec<TokenParser<U>>) -> TokenParser<Vec<U>> {
    TokenParser::new(
      move |tokens| {
        let current_index = tokens.current_index;
        let mut results = Vec::new();

        for parser in &parsers {
          match (parser.run)(tokens) {
            Ok(value) => results.push(value),
            Err(e) => {
              tokens.set_current_index(current_index);
              return Err(e);
            },
          }
        }

        Ok(results)
      },
      "sequence",
    )
  }

  /// Builder for a sequence whose elements are joined by a separator
  /// (see [`SequenceParsers::separated_by`]).
  pub fn sequence_with_separators<U: Clone + Debug + 'static>(
    parsers: Vec<TokenParser<U>>,
  ) -> SequenceParsers<U> {
    SequenceParsers::new(parsers)
  }
547
  /// Runs a sequence mixing required (`Either::Left`) and optional
  /// (`Either::Right`) parsers, with `separator` expected between matched
  /// elements. Optional parsers that don't match contribute `None`, and any
  /// separator speculatively consumed before them is rolled back.
  pub fn flexible_sequence_separated_by<U: Clone + Debug + 'static, S: Clone + Debug + 'static>(
    parsers: Vec<Either<TokenParser<U>, TokenParser<Option<U>>>>,
    separator: TokenParser<S>,
  ) -> TokenParser<Vec<Option<U>>> {
    TokenParser::new(
      move |tokens| {
        let current_index = tokens.current_index;
        let mut results = Vec::new();

        for (i, parser_either) in parsers.iter().enumerate() {
          if i > 0 {
            // Speculatively consume the separator; whether that was correct
            // depends on what the next parser does.
            let separator_index = tokens.current_index;
            let separator_consumed = (separator.run)(tokens).is_ok();

            match parser_either {
              Either::Left(required_parser) => {
                // NOTE(review): the `i > 0` here is always true inside this
                // branch and is redundant.
                if !separator_consumed && i > 0 {
                  tokens.set_current_index(current_index);
                  return Err(CssParseError::ParseError {
                    message: format!("Expected separator before required parser {}", i),
                  });
                }

                match (required_parser.run)(tokens) {
                  Ok(value) => results.push(Some(value)),
                  Err(e) => {
                    tokens.set_current_index(current_index);
                    return Err(e);
                  },
                }
              },
              Either::Right(optional_parser) => {
                match (optional_parser.run)(tokens) {
                  Ok(Some(value)) => {
                    // An optional parser that matched still requires the
                    // separator before it.
                    if !separator_consumed && i > 0 {
                      tokens.set_current_index(current_index);
                      return Err(CssParseError::ParseError {
                        message: format!(
                          "Expected separator before optional parser {} that matched",
                          i
                        ),
                      });
                    }
                    results.push(Some(value));
                  },
                  Ok(None) => {
                    // Nothing matched: give back the separator, if taken.
                    if separator_consumed {
                      tokens.set_current_index(separator_index);
                    }
                    results.push(None);
                  },
                  Err(_) => {
                    // Optional parser failure is treated as "absent".
                    if separator_consumed {
                      tokens.set_current_index(separator_index);
                    }
                    results.push(None);
                  },
                }
              },
            }
          } else {
            // First element: no separator expected before it.
            match parser_either {
              Either::Left(required_parser) => match (required_parser.run)(tokens) {
                Ok(value) => results.push(Some(value)),
                Err(e) => {
                  tokens.set_current_index(current_index);
                  return Err(e);
                },
              },
              Either::Right(optional_parser) => match (optional_parser.run)(tokens) {
                Ok(option_value) => results.push(option_value),
                Err(_) => results.push(None),
              },
            }
          }
        }

        Ok(results)
      },
      "flexibleSequenceSeparatedBy",
    )
  }

  /// Builder that matches all `parsers` in any order, each exactly once.
  pub fn set_of<U: Clone + Debug + 'static>(parsers: Vec<TokenParser<U>>) -> SetOfParsers<U> {
    SetOfParsers::new(parsers)
  }
645
  /// Matches `parser` as many times as possible; zero matches is a success.
  pub fn zero_or_more(parser: TokenParser<T>) -> TokenParser<Vec<T>> {
    let label = format!("ZeroOrMore<{}>", parser.label);
    TokenParser::new(
      move |tokens| {
        let mut results = Vec::new();
        loop {
          let current_index = tokens.current_index;
          match (parser.run)(tokens) {
            Ok(value) => results.push(value),
            Err(_) => {
              // Roll back the failed attempt and stop collecting.
              tokens.set_current_index(current_index);
              break;
            },
          }
        }
        Ok(results)
      },
      &label,
    )
  }

  /// Matches `parser` one or more times; fails (with the first attempt's
  /// error) when it cannot match at all.
  pub fn one_or_more(parser: TokenParser<T>) -> TokenParser<Vec<T>> {
    let label = format!("OneOrMore<{}>", parser.label);
    TokenParser::new(
      move |tokens| {
        let mut results = Vec::new();
        let start_index = tokens.current_index;

        // The first match is mandatory.
        match (parser.run)(tokens) {
          Ok(value) => results.push(value),
          Err(e) => {
            tokens.set_current_index(start_index);
            return Err(e);
          },
        }

        // Subsequent matches are collected until one fails.
        loop {
          let current_index = tokens.current_index;
          match (parser.run)(tokens) {
            Ok(value) => results.push(value),
            Err(_) => {
              tokens.set_current_index(current_index);
              break;
            },
          }
        }

        Ok(results)
      },
      &label,
    )
  }
702
703 pub fn token(expected_token: SimpleToken, label: Option<&str>) -> TokenParser<SimpleToken> {
705 let label_str = label
706 .unwrap_or(&format!("{:?}", expected_token))
707 .to_string();
708
709 TokenParser::new(
710 move |tokens| {
711 let current_index = tokens.current_index;
712
713 match tokens.consume_next_token() {
714 Ok(Some(token)) => {
715 if std::mem::discriminant(&token) == std::mem::discriminant(&expected_token) {
716 Ok(token)
717 } else {
718 tokens.set_current_index(current_index);
719 Err(CssParseError::ParseError {
720 message: format!("Expected token type {:?}, got {:?}", expected_token, token),
721 })
722 }
723 },
724 Ok(None) => {
725 tokens.set_current_index(current_index);
726 Err(CssParseError::ParseError {
727 message: "Expected token, got end of input".to_string(),
728 })
729 },
730 Err(e) => {
731 tokens.set_current_index(current_index);
732 Err(e)
733 },
734 }
735 },
736 &label_str,
737 )
738 }
739
  /// Matches an identifier token whose text equals `expected`, yielding the
  /// identifier's string value.
  pub fn string(expected: &str) -> TokenParser<String> {
    let expected_clone = expected.to_string();
    Self::token(SimpleToken::Ident(String::new()), Some("Ident"))
      .map(
        |token| {
          if let SimpleToken::Ident(value) = token {
            value
          } else {
            // `token` only succeeds on the Ident variant, so any other
            // variant here is impossible.
            stylex_unreachable!()
          }
        },
        Some(".value"),
      )
      .where_predicate(
        move |value| value == &expected_clone,
        Some(&format!("=== {}", expected)),
      )
  }

  /// Matches a function-opening token whose name equals `name`, yielding the
  /// function name.
  pub fn fn_name(name: &str) -> TokenParser<String> {
    let name_owned = name.to_string();
    Self::token(SimpleToken::Function(String::new()), Some("Function"))
      .map(
        |token| {
          if let SimpleToken::Function(value) = token {
            value
          } else {
            // `token` only succeeds on the Function variant.
            stylex_unreachable!()
          }
        },
        Some(".value"),
      )
      .where_predicate(
        move |value| value == &name_owned,
        Some(&format!("=== {}", name)),
      )
  }

  /// Matches any identifier token (same as [`tokens::ident`]).
  pub fn ident() -> TokenParser<SimpleToken> {
    Self::token(SimpleToken::Ident(String::new()), Some("Ident"))
  }
783
  /// Matches `parser` one or more times with `separator` between elements.
  /// A trailing separator is not consumed; failure of the first element fails
  /// the whole parse.
  pub fn one_or_more_separated_by<S>(
    parser: TokenParser<T>,
    separator: TokenParser<S>,
  ) -> TokenParser<Vec<T>>
  where
    S: Clone + Debug + 'static,
  {
    let label = format!(
      "OneOrMoreSeparatedBy<{}, {}>",
      parser.label, separator.label
    );
    TokenParser::new(
      move |tokens| {
        let mut results = Vec::new();
        let start_index = tokens.current_index;

        // The first element is mandatory.
        match (parser.run)(tokens) {
          Ok(value) => results.push(value),
          Err(e) => {
            tokens.set_current_index(start_index);
            return Err(e);
          },
        }

        // Then: (separator, element) pairs until either half fails. On a
        // failed pair, rewind to before the separator so it isn't consumed.
        loop {
          let separator_index = tokens.current_index;

          match (separator.run)(tokens) {
            Ok(_) => {
              match (parser.run)(tokens) {
                Ok(value) => results.push(value),
                Err(_) => {
                  tokens.set_current_index(separator_index);
                  break;
                },
              }
            },
            Err(_) => {
              tokens.set_current_index(separator_index);
              break;
            },
          }
        }

        Ok(results)
      },
      &label,
    )
  }

  /// Matches `parser` zero or more times with `separator` between elements.
  /// Zero matches is a success (empty vector).
  pub fn zero_or_more_separated_by<S>(
    parser: TokenParser<T>,
    separator: TokenParser<S>,
  ) -> TokenParser<Vec<T>>
  where
    S: Clone + Debug + 'static,
  {
    let label = format!(
      "ZeroOrMoreSeparatedBy<{}, {}>",
      parser.label, separator.label
    );
    TokenParser::new(
      move |tokens| {
        let mut results = Vec::new();
        let current_index = tokens.current_index;

        match (parser.run)(tokens) {
          Ok(value) => results.push(value),
          Err(_) => {
            // No first element: succeed with an empty result.
            tokens.set_current_index(current_index);
            return Ok(results);
          },
        }

        // Same (separator, element) loop as the one-or-more variant.
        loop {
          let separator_index = tokens.current_index;

          match (separator.run)(tokens) {
            Ok(_) => {
              match (parser.run)(tokens) {
                Ok(value) => results.push(value),
                Err(_) => {
                  tokens.set_current_index(separator_index);
                  break;
                },
              }
            },
            Err(_) => {
              tokens.set_current_index(separator_index);
              break;
            },
          }
        }

        Ok(results)
      },
      &label,
    )
  }
897}
898
899impl<T: Clone + Debug> Display for TokenParser<T> {
900 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
901 write!(f, "{}", self.label)
902 }
903}
904
/// Wrapper that turns a parser into one that cannot fail: a non-match yields
/// `None` instead of an error.
#[derive(Clone)]
pub struct TokenOptionalParser<T: Clone + Debug> {
  // The underlying, non-optional parser.
  pub parser: TokenParser<T>,
}

impl<T: Clone + Debug + 'static> TokenOptionalParser<T> {
  /// Wraps `parser` without modifying it.
  pub fn new(parser: TokenParser<T>) -> Self {
    Self { parser }
  }

  /// Converts to a `TokenParser<Option<T>>` that returns `Ok(None)` — after
  /// rewinding the token position — whenever the inner parser fails.
  pub fn as_token_parser(self) -> TokenParser<Option<T>> {
    let parser_run = self.parser.run;
    let label = format!("Optional<{}>", self.parser.label);

    TokenParser::new(
      move |tokens| {
        let current_index = tokens.current_index;
        match (parser_run)(tokens) {
          Ok(value) => Ok(Some(value)),
          Err(_) => {
            tokens.set_current_index(current_index);
            Ok(None)
          },
        }
      },
      &label,
    )
  }
}
935
impl<T: Clone + Debug + 'static> TokenParser<T> {
  /// Makes this parser optional: a failed match becomes `Ok(None)`.
  pub fn optional(self) -> TokenParser<Option<T>> {
    TokenOptionalParser::new(self).as_token_parser()
  }
}
942
/// Builder pairing an element parser with a separator parser
/// (see [`TokenParser::separated_by`]).
#[derive(Clone)]
pub struct SeparatedParser<T: Clone + Debug + 'static, S: Clone + Debug + 'static> {
  parser: TokenParser<T>,
  separator: TokenParser<S>,
}

impl<T: Clone + Debug + 'static, S: Clone + Debug + 'static> SeparatedParser<T, S> {
  /// At least one element, separator-joined.
  pub fn one_or_more(self) -> TokenParser<Vec<T>> {
    TokenParser::one_or_more_separated_by(self.parser, self.separator)
  }

  /// Any number of elements (possibly none), separator-joined.
  pub fn zero_or_more(self) -> TokenParser<Vec<T>> {
    TokenParser::zero_or_more_separated_by(self.parser, self.separator)
  }

  /// Default materialization: one-or-more.
  pub fn as_token_parser(self) -> TokenParser<Vec<T>> {
    self.one_or_more()
  }
}
967
/// Builder for "zero or more" repetition with an optional pre-set separator.
#[derive(Clone)]
pub struct TokenZeroOrMoreParsers<T: Clone + Debug> {
  // Parser for each element.
  parser: TokenParser<T>,
  // Separator run between elements, when present.
  separator: Option<TokenParser<()>>,
}

impl<T: Clone + Debug + 'static> TokenZeroOrMoreParsers<T> {
  /// Creates the builder from an element parser and an optional separator.
  pub fn new(parser: TokenParser<T>, separator: Option<TokenParser<()>>) -> Self {
    Self { parser, separator }
  }

  /// Replaces/sets the separator, returning a `SeparatedParser` builder.
  /// Note: any separator stored on `self` is discarded.
  pub fn separated_by<S: Clone + Debug + 'static>(
    self,
    separator: TokenParser<S>,
  ) -> SeparatedParser<T, S> {
    SeparatedParser {
      parser: self.parser,
      separator,
    }
  }

  /// Materializes the builder: matches the element parser zero or more times,
  /// with the separator between elements when one is set. Always succeeds.
  pub fn as_token_parser(self) -> TokenParser<Vec<T>> {
    let parser = self.parser;
    let separator = self.separator;
    let label = format!("ZeroOrMore<{}>", parser.label);

    TokenParser::new(
      move |tokens| {
        let mut results = Vec::new();

        for i in 0.. {
          // From the second element on, a configured separator must match;
          // if it doesn't, the collected results are returned as-is.
          if i > 0 && separator.is_some() {
            let current_index = tokens.current_index;
            if let Some(ref sep) = separator {
              match (sep.run)(tokens) {
                Ok(_) => {},
                Err(_) => {
                  tokens.set_current_index(current_index);
                  return Ok(results);
                },
              }
            }
          }

          let current_index = tokens.current_index;
          match (parser.run)(tokens) {
            Ok(value) => results.push(value),
            Err(_) => {
              tokens.set_current_index(current_index);
              return Ok(results);
            },
          }
        }

        // Unreachable: the unbounded loop above only exits via `return`.
        Ok(results)
      },
      &label,
    )
  }
}
1030
/// Builder for "one or more" repetition with an optional pre-set separator.
#[derive(Clone)]
pub struct TokenOneOrMoreParsers<T: Clone + Debug> {
  // Parser for each element.
  parser: TokenParser<T>,
  // Separator run between elements, when present.
  separator: Option<TokenParser<()>>,
}

/// A group of parsers that must all match, in any order
/// (see [`TokenParser::set_of`]).
#[derive(Clone)]
pub struct SetOfParsers<T: Clone + Debug> {
  parsers: Vec<TokenParser<T>>,
}
1041
impl<T: Clone + Debug + 'static> SetOfParsers<T> {
  /// Creates the set from its member parsers.
  pub fn new(parsers: Vec<TokenParser<T>>) -> Self {
    Self { parsers }
  }

  /// Matches every member parser exactly once, in any input order, with
  /// `separator` required between consecutive matches. Results are returned
  /// in the PARSERS' declaration order, not the input order.
  pub fn separated_by<S: Clone + Debug + 'static>(
    self,
    separator: TokenParser<S>,
  ) -> TokenParser<Vec<T>> {
    let parsers = self.parsers;
    TokenParser::new(
      move |tokens| {
        let start_index = tokens.current_index;
        // Slot per parser, filled in whatever order the input matches them.
        let mut results = vec![None; parsers.len()];
        let mut used_indices = std::collections::HashSet::new();
        let mut errors = Vec::new();

        for position in 0..parsers.len() {
          let mut found = false;
          let mut position_errors = Vec::new();

          // A separator is mandatory before every match after the first.
          if position > 0 {
            match (separator.run)(tokens) {
              Ok(_) => {
              },
              Err(e) => {
                tokens.set_current_index(start_index);
                return Err(CssParseError::ParseError {
                  message: format!(
                    "SetOf: Expected separator before position {}: {}",
                    position, e
                  ),
                });
              },
            }
          }

          // Try every not-yet-used parser at this position.
          for (parser_index, parser) in parsers.iter().enumerate() {
            if used_indices.contains(&parser_index) {
              continue;
            }

            let before_attempt = tokens.current_index;
            match (parser.run)(tokens) {
              Ok(value) => {
                results[parser_index] = Some(value);
                used_indices.insert(parser_index);
                found = true;
                break;
              },
              Err(e) => {
                tokens.set_current_index(before_attempt);
                position_errors.push(format!("Parser {}: {}", parser_index, e));
              },
            }
          }

          if !found {
            errors.extend(position_errors);
            tokens.set_current_index(start_index);
            return Err(CssParseError::ParseError {
              message: format!(
                "SetOf failed at position {}: {}",
                position,
                errors.join("; ")
              ),
            });
          }
        }

        // Every slot must be filled; unwrap the Options in declaration order.
        let final_results: Result<Vec<T>, String> = results
          .into_iter()
          .enumerate()
          .map(|(i, opt)| opt.ok_or_else(|| format!("Parser {} did not match", i)))
          .collect();

        match final_results {
          Ok(values) => Ok(values),
          Err(err) => {
            tokens.set_current_index(start_index);
            Err(CssParseError::ParseError {
              message: format!("SetOf incomplete: {}", err),
            })
          },
        }
      },
      "setOfSeparatedBy",
    )
  }

  /// Same as [`separated_by`](Self::separated_by) but with no separator
  /// between matches.
  pub fn as_token_parser(self) -> TokenParser<Vec<T>> {
    let parsers = self.parsers;
    TokenParser::new(
      move |tokens| {
        let start_index = tokens.current_index;
        let mut results = vec![None; parsers.len()];
        let mut used_indices = std::collections::HashSet::new();
        let mut errors = Vec::new();

        for position in 0..parsers.len() {
          let mut found = false;
          let mut position_errors = Vec::new();

          // Try every not-yet-used parser at this position.
          for (parser_index, parser) in parsers.iter().enumerate() {
            if used_indices.contains(&parser_index) {
              continue;
            }

            let before_attempt = tokens.current_index;
            match (parser.run)(tokens) {
              Ok(value) => {
                results[parser_index] = Some(value);
                used_indices.insert(parser_index);
                found = true;
                break;
              },
              Err(e) => {
                tokens.set_current_index(before_attempt);
                position_errors.push(format!("Parser {}: {}", parser_index, e));
              },
            }
          }

          if !found {
            errors.extend(position_errors);
            tokens.set_current_index(start_index);
            return Err(CssParseError::ParseError {
              message: format!(
                "SetOf failed at position {}: {}",
                position,
                errors.join("; ")
              ),
            });
          }
        }

        let final_results: Result<Vec<T>, String> = results
          .into_iter()
          .enumerate()
          .map(|(i, opt)| opt.ok_or_else(|| format!("Parser {} did not match", i)))
          .collect();

        match final_results {
          Ok(values) => Ok(values),
          Err(err) => {
            tokens.set_current_index(start_index);
            Err(CssParseError::ParseError {
              message: format!("SetOf incomplete: {}", err),
            })
          },
        }
      },
      "setOf",
    )
  }
}
1209
/// Builder for an ordered sequence of parsers
/// (see [`TokenParser::sequence_with_separators`]).
#[derive(Clone)]
pub struct SequenceParsers<T: Clone + Debug> {
  parsers: Vec<TokenParser<T>>,
}

impl<T: Clone + Debug + 'static> SequenceParsers<T> {
  /// Creates the sequence from its member parsers.
  pub fn new(parsers: Vec<TokenParser<T>>) -> Self {
    Self { parsers }
  }

  /// Materializes the sequence with no separators: every parser must match
  /// in order, or the whole parse fails and rewinds.
  pub fn as_token_parser(self) -> TokenParser<Vec<T>> {
    let parsers = self.parsers;
    TokenParser::new(
      move |tokens| {
        let current_index = tokens.current_index;
        let mut results = Vec::new();

        for parser in &parsers {
          match (parser.run)(tokens) {
            Ok(value) => results.push(value),
            Err(e) => {
              tokens.set_current_index(current_index);
              return Err(e);
            },
          }
        }

        Ok(results)
      },
      "sequence",
    )
  }

  /// Materializes the sequence with `separator` tried between elements. The
  /// separator is speculative: if the element fails after a consumed
  /// separator, the parse retries the element without it before giving up.
  pub fn separated_by<S: Clone + Debug + 'static>(
    self,
    separator: TokenParser<S>,
  ) -> TokenParser<Vec<T>> {
    let parsers = self.parsers;
    TokenParser::new(
      move |tokens| {
        let current_index = tokens.current_index;
        let mut results = Vec::new();

        for (i, parser) in parsers.iter().enumerate() {
          if i > 0 {
            let separator_index = tokens.current_index;

            // Speculatively consume a separator if one is present.
            let separator_consumed = (separator.run)(tokens).is_ok();

            match (parser.run)(tokens) {
              Ok(value) => {
                results.push(value);
              },
              Err(_) => {
                if separator_consumed {
                  // Maybe the separator itself was the problem: rewind to
                  // before it and retry the element directly.
                  tokens.set_current_index(separator_index);
                  match (parser.run)(tokens) {
                    Ok(value) => {
                      results.push(value);
                    },
                    Err(e) => {
                      tokens.set_current_index(current_index);
                      return Err(e);
                    },
                  }
                } else {
                  tokens.set_current_index(current_index);
                  return Err(CssParseError::ParseError {
                    message: format!(
                      "SequenceSeparatedBy: Parser {} failed and no separator found",
                      i
                    ),
                  });
                }
              },
            }
          } else {
            // First element: no separator expected.
            match (parser.run)(tokens) {
              Ok(value) => results.push(value),
              Err(e) => {
                tokens.set_current_index(current_index);
                return Err(e);
              },
            }
          }
        }

        Ok(results)
      },
      "sequenceSeparatedBy",
    )
  }
}
1318
1319impl<T: Clone + Debug + 'static> TokenOneOrMoreParsers<T> {
1320 pub fn new(parser: TokenParser<T>, separator: Option<TokenParser<()>>) -> Self {
1321 Self { parser, separator }
1322 }
1323
1324 pub fn separated_by<S: Clone + Debug + 'static>(
1326 self,
1327 separator: TokenParser<S>,
1328 ) -> SeparatedParser<T, S> {
1329 SeparatedParser {
1330 parser: self.parser,
1331 separator,
1332 }
1333 }
1334
1335 pub fn as_token_parser(self) -> TokenParser<Vec<T>> {
1337 let parser = self.parser;
1338 let separator = self.separator;
1339 let label = format!("OneOrMore<{}>", parser.label);
1340
1341 TokenParser::new(
1342 move |tokens| {
1343 let mut results = Vec::new();
1344
1345 for i in 0.. {
1346 if i > 0 && separator.is_some() {
1347 let current_index = tokens.current_index;
1348 if let Some(ref sep) = separator {
1349 match (sep.run)(tokens) {
1350 Ok(_) => {},
1351 Err(_) => {
1352 tokens.set_current_index(current_index);
1353 return Ok(results);
1354 },
1355 }
1356 }
1357 }
1358
1359 let current_index = tokens.current_index;
1360 match (parser.run)(tokens) {
1361 Ok(value) => results.push(value),
1362 Err(e) => {
1363 if i == 0 {
1364 tokens.set_current_index(current_index);
1365 return Err(e);
1366 }
1367 return Ok(results);
1368 },
1369 }
1370 }
1371
1372 Ok(results)
1373 },
1374 &label,
1375 )
1376 }
1377}
1378
/// Namespace type mirroring the [`tokens`] module: its associated functions
/// build single-token parsers.
pub struct Tokens;
1381
1382impl Tokens {
1383 pub fn ident() -> TokenParser<SimpleToken> {
1385 TokenParser::<SimpleToken>::token(SimpleToken::Ident(String::new()), Some("Ident"))
1386 }
1387
1388 pub fn comma() -> TokenParser<SimpleToken> {
1390 TokenParser::<SimpleToken>::token(SimpleToken::Comma, Some("Comma"))
1391 }
1392
1393 pub fn colon() -> TokenParser<SimpleToken> {
1395 TokenParser::<SimpleToken>::token(SimpleToken::Colon, Some("Colon"))
1396 }
1397
1398 pub fn semicolon() -> TokenParser<SimpleToken> {
1400 TokenParser::<SimpleToken>::token(SimpleToken::Semicolon, Some("Semicolon"))
1401 }
1402
1403 pub fn open_paren() -> TokenParser<SimpleToken> {
1405 TokenParser::<SimpleToken>::token(SimpleToken::LeftParen, Some("OpenParen"))
1406 }
1407
1408 pub fn close_paren() -> TokenParser<SimpleToken> {
1410 TokenParser::<SimpleToken>::token(SimpleToken::RightParen, Some("CloseParen"))
1411 }
1412
1413 pub fn open_square() -> TokenParser<SimpleToken> {
1415 TokenParser::<SimpleToken>::token(SimpleToken::LeftBracket, Some("OpenSquare"))
1416 }
1417
1418 pub fn close_square() -> TokenParser<SimpleToken> {
1420 TokenParser::<SimpleToken>::token(SimpleToken::RightBracket, Some("CloseSquare"))
1421 }
1422
1423 pub fn open_curly() -> TokenParser<SimpleToken> {
1425 TokenParser::<SimpleToken>::token(SimpleToken::LeftBrace, Some("OpenCurly"))
1426 }
1427
1428 pub fn close_curly() -> TokenParser<SimpleToken> {
1430 TokenParser::<SimpleToken>::token(SimpleToken::RightBrace, Some("CloseCurly"))
1431 }
1432
1433 pub fn number() -> TokenParser<SimpleToken> {
1435 TokenParser::<SimpleToken>::token(SimpleToken::Number(0.0), Some("Number"))
1436 }
1437
1438 pub fn percentage() -> TokenParser<SimpleToken> {
1440 TokenParser::<SimpleToken>::token(SimpleToken::Percentage(0.0), Some("Percentage"))
1441 }
1442
1443 pub fn dimension() -> TokenParser<SimpleToken> {
1445 TokenParser::<SimpleToken>::token(
1446 SimpleToken::Dimension {
1447 value: 0.0,
1448 unit: String::new(),
1449 },
1450 Some("Dimension"),
1451 )
1452 }
1453
1454 pub fn string() -> TokenParser<SimpleToken> {
1456 TokenParser::<SimpleToken>::token(SimpleToken::String(String::new()), Some("String"))
1457 }
1458
1459 pub fn function() -> TokenParser<SimpleToken> {
1461 TokenParser::<SimpleToken>::token(SimpleToken::Function(String::new()), Some("Function"))
1462 }
1463
1464 pub fn hash() -> TokenParser<SimpleToken> {
1466 TokenParser::<SimpleToken>::token(SimpleToken::Hash(String::new()), Some("Hash"))
1467 }
1468
1469 pub fn delim(ch: char) -> TokenParser<SimpleToken> {
1471 TokenParser::<SimpleToken>::token(SimpleToken::Delim(ch), Some("Delim"))
1472 }
1473
1474 pub fn whitespace() -> TokenParser<SimpleToken> {
1476 TokenParser::<SimpleToken>::token(SimpleToken::Whitespace, Some("Whitespace"))
1477 }
1478
1479 pub fn at_keyword() -> TokenParser<SimpleToken> {
1481 TokenParser::<SimpleToken>::token(SimpleToken::AtKeyword(String::new()), Some("AtKeyword"))
1482 }
1483}
1484
1485impl<T: Clone + Debug + 'static> TokenParser<T> {
1486 pub fn tokens() -> Tokens {
1487 Tokens
1488 }
1489
1490 pub fn mixed_sequence<U: Clone + Debug + 'static>(
1493 parsers: Vec<Either<TokenParser<U>, TokenParser<Option<U>>>>,
1494 ) -> MixedSequenceBuilder<U> {
1495 MixedSequenceBuilder::new(parsers)
1496 }
1497}
1498
/// Intermediate builder holding a heterogeneous list of required
/// (`Either::Left`) and optional (`Either::Right`) parsers, produced by
/// `TokenParser::mixed_sequence` and consumed by `separated_by`.
pub struct MixedSequenceBuilder<T: Clone + Debug + 'static> {
  // Required parsers yield `T`; optional ones yield `Option<T>`.
  parsers: Vec<Either<TokenParser<T>, TokenParser<Option<T>>>>,
}
1503
1504impl<T: Clone + Debug + 'static> MixedSequenceBuilder<T> {
1505 pub fn new(parsers: Vec<Either<TokenParser<T>, TokenParser<Option<T>>>>) -> Self {
1506 Self { parsers }
1507 }
1508
1509 pub fn separated_by<S: Clone + Debug + 'static>(
1511 self,
1512 separator: TokenParser<S>,
1513 ) -> TokenParser<Vec<Option<T>>> {
1514 TokenParser::<Vec<Option<T>>>::flexible_sequence_separated_by(self.parsers, separator)
1515 }
1516}
1517
1518pub fn peek_tokens(css: &str, count: usize) -> Vec<SimpleToken> {
1521 let mut tokens = TokenList::new(css);
1522 let mut result = Vec::new();
1523 for _ in 0..count {
1524 if let Ok(Some(token)) = tokens.peek() {
1525 result.push(token);
1526 let _ = tokens.consume_next_token();
1528 } else {
1529 break;
1530 }
1531 }
1532 result
1533}
1534
#[cfg(test)]
mod tests {
  use super::*;

  #[test]
  fn test_always_parser() {
    // `always` succeeds with its value regardless of input.
    let always_42 = TokenParser::always(42);
    assert_eq!(always_42.parse("anything").unwrap(), 42);
  }

  #[test]
  fn test_never_parser() {
    // `never` fails on any input.
    let never: TokenParser<i32> = TokenParser::never();
    assert!(never.parse("anything").is_err());
  }

  #[test]
  fn test_map_parser() {
    let doubled = TokenParser::always(10).map(|x| x * 2, Some("double"));
    assert_eq!(doubled.parse("anything").unwrap(), 20);
  }

  #[test]
  fn test_flat_map_parser() {
    let incremented =
      TokenParser::always(5).flat_map(|x| TokenParser::always(x + 1), Some("add_one"));
    assert_eq!(incremented.parse("anything").unwrap(), 6);
  }

  #[test]
  fn test_optional_parser() {
    // A succeeding parser wrapped in `optional` yields `Some`.
    let some_parser = TokenParser::always(42).optional();
    assert_eq!(some_parser.parse("anything").unwrap(), Some(42));

    // A failing parser wrapped in `optional` yields `None` instead of erroring.
    let none_parser: TokenParser<Option<i32>> = TokenParser::<i32>::never().optional();
    assert_eq!(none_parser.parse("anything").unwrap(), None);
  }

  #[test]
  fn test_where_predicate_parser() {
    let passing = TokenParser::always(10).where_predicate(|&x| x > 5, Some("greater_than_5"));
    assert_eq!(passing.parse("anything").unwrap(), 10);

    let failing = TokenParser::always(3).where_predicate(|&x| x > 5, Some("greater_than_5"));
    assert!(failing.parse("anything").is_err());
  }

  #[test]
  fn test_one_of_parser() {
    // `one_of` returns the first alternative that succeeds.
    let first_success = TokenParser::one_of(vec![
      TokenParser::<i32>::never(),
      TokenParser::always(42),
      TokenParser::always(24),
    ]);
    assert_eq!(first_success.parse("anything").unwrap(), 42);
  }

  #[test]
  fn test_sequence_parser() {
    let three_in_a_row = TokenParser::<i32>::sequence(vec![
      TokenParser::always(1),
      TokenParser::always(2),
      TokenParser::always(3),
    ]);
    assert_eq!(three_in_a_row.parse("anything").unwrap(), vec![1, 2, 3]);
  }

  #[test]
  fn test_or_parser() {
    // `or` prefers the left branch when it succeeds.
    let left = TokenParser::always(1);
    let right = TokenParser::always(2);
    let either_parser = left.or(right);
    assert!(matches!(either_parser.parse("anything").unwrap(), Either::Left(1)));
  }

  #[test]
  fn test_parse_to_end() {
    let parser = TokenParser::always(42);
    assert_eq!(parser.parse_to_end("").unwrap(), 42);
  }

  #[test]
  fn test_label_preservation() {
    let base = TokenParser::always(42);
    assert!(base.label.contains("Always"));

    // Mapping decorates the label rather than replacing it.
    let mapped = base.map(|x| x * 2, Some("double"));
    assert!(mapped.label.contains("map(double)"));
  }
}