-use std::ops::Range;
-
 use diagnostics::make_unclosed_delims_error;
 use rustc_ast::ast::{self, AttrStyle};
 use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Token, TokenKind};
@@ -10,7 +8,7 @@ use rustc_errors::{Applicability, Diag, DiagCtxtHandle, StashKey};
 use rustc_lexer::{
     Base, Cursor, DocStyle, FrontmatterAllowed, LiteralKind, RawStrError, is_whitespace,
 };
-use rustc_literal_escaper::{EscapeError, Mode, unescape_mixed, unescape_unicode};
+use rustc_literal_escaper::{EscapeError, Mode, unescape_for_errors};
 use rustc_session::lint::BuiltinLintDiag;
 use rustc_session::lint::builtin::{
     RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX, RUST_2024_GUARDED_STRING_INCOMPATIBLE_SYNTAX,
@@ -617,7 +615,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
                     }
                     err.emit()
                 }
-                self.cook_unicode(token::Char, Mode::Char, start, end, 1, 1) // ' '
+                self.cook_quoted(token::Char, Mode::Char, start, end, 1, 1) // ' '
             }
             rustc_lexer::LiteralKind::Byte { terminated } => {
                 if !terminated {
@@ -629,7 +627,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
                         .with_code(E0763)
                         .emit()
                 }
-                self.cook_unicode(token::Byte, Mode::Byte, start, end, 2, 1) // b' '
+                self.cook_quoted(token::Byte, Mode::Byte, start, end, 2, 1) // b' '
             }
             rustc_lexer::LiteralKind::Str { terminated } => {
                 if !terminated {
@@ -641,7 +639,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
                         .with_code(E0765)
                         .emit()
                 }
-                self.cook_unicode(token::Str, Mode::Str, start, end, 1, 1) // " "
+                self.cook_quoted(token::Str, Mode::Str, start, end, 1, 1) // " "
             }
             rustc_lexer::LiteralKind::ByteStr { terminated } => {
                 if !terminated {
@@ -653,7 +651,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
                         .with_code(E0766)
                         .emit()
                 }
-                self.cook_unicode(token::ByteStr, Mode::ByteStr, start, end, 2, 1) // b" "
+                self.cook_quoted(token::ByteStr, Mode::ByteStr, start, end, 2, 1) // b" "
             }
             rustc_lexer::LiteralKind::CStr { terminated } => {
                 if !terminated {
@@ -665,13 +663,13 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
                         .with_code(E0767)
                         .emit()
                 }
-                self.cook_mixed(token::CStr, Mode::CStr, start, end, 2, 1) // c" "
+                self.cook_quoted(token::CStr, Mode::CStr, start, end, 2, 1) // c" "
             }
             rustc_lexer::LiteralKind::RawStr { n_hashes } => {
                 if let Some(n_hashes) = n_hashes {
                     let n = u32::from(n_hashes);
                     let kind = token::StrRaw(n_hashes);
-                    self.cook_unicode(kind, Mode::RawStr, start, end, 2 + n, 1 + n) // r##" "##
+                    self.cook_quoted(kind, Mode::RawStr, start, end, 2 + n, 1 + n) // r##" "##
                 } else {
                     self.report_raw_str_error(start, 1);
                 }
@@ -680,7 +678,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
                 if let Some(n_hashes) = n_hashes {
                     let n = u32::from(n_hashes);
                     let kind = token::ByteStrRaw(n_hashes);
-                    self.cook_unicode(kind, Mode::RawByteStr, start, end, 3 + n, 1 + n) // br##" "##
+                    self.cook_quoted(kind, Mode::RawByteStr, start, end, 3 + n, 1 + n) // br##" "##
                 } else {
                     self.report_raw_str_error(start, 2);
                 }
@@ -689,7 +687,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
                 if let Some(n_hashes) = n_hashes {
                     let n = u32::from(n_hashes);
                     let kind = token::CStrRaw(n_hashes);
-                    self.cook_unicode(kind, Mode::RawCStr, start, end, 3 + n, 1 + n) // cr##" "##
+                    self.cook_quoted(kind, Mode::RawCStr, start, end, 3 + n, 1 + n) // cr##" "##
                 } else {
                     self.report_raw_str_error(start, 2);
                 }
@@ -1006,40 +1004,36 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
         self.dcx().emit_fatal(errors::TooManyHashes { span: self.mk_sp(start, self.pos), num });
     }
 
-    fn cook_common(
+    fn cook_quoted(
         &self,
         mut kind: token::LitKind,
         mode: Mode,
         start: BytePos,
         end: BytePos,
         prefix_len: u32,
         postfix_len: u32,
-        unescape: fn(&str, Mode, &mut dyn FnMut(Range<usize>, Result<(), EscapeError>)),
     ) -> (token::LitKind, Symbol) {
         let content_start = start + BytePos(prefix_len);
         let content_end = end - BytePos(postfix_len);
         let lit_content = self.str_from_to(content_start, content_end);
-        unescape(lit_content, mode, &mut |range, result| {
-            // Here we only check for errors. The actual unescaping is done later.
-            if let Err(err) = result {
-                let span_with_quotes = self.mk_sp(start, end);
-                let (start, end) = (range.start as u32, range.end as u32);
-                let lo = content_start + BytePos(start);
-                let hi = lo + BytePos(end - start);
-                let span = self.mk_sp(lo, hi);
-                let is_fatal = err.is_fatal();
-                if let Some(guar) = emit_unescape_error(
-                    self.dcx(),
-                    lit_content,
-                    span_with_quotes,
-                    span,
-                    mode,
-                    range,
-                    err,
-                ) {
-                    assert!(is_fatal);
-                    kind = token::Err(guar);
-                }
+        unescape_for_errors(lit_content, mode, |range, err| {
+            let span_with_quotes = self.mk_sp(start, end);
+            let (start, end) = (range.start as u32, range.end as u32);
+            let lo = content_start + BytePos(start);
+            let hi = lo + BytePos(end - start);
+            let span = self.mk_sp(lo, hi);
+            let is_fatal = err.is_fatal();
+            if let Some(guar) = emit_unescape_error(
+                self.dcx(),
+                lit_content,
+                span_with_quotes,
+                span,
+                mode,
+                range,
+                err,
+            ) {
+                assert!(is_fatal);
+                kind = token::Err(guar);
             }
         });
 
@@ -1052,34 +1046,6 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
         };
         (kind, sym)
     }
-
-    fn cook_unicode(
-        &self,
-        kind: token::LitKind,
-        mode: Mode,
-        start: BytePos,
-        end: BytePos,
-        prefix_len: u32,
-        postfix_len: u32,
-    ) -> (token::LitKind, Symbol) {
-        self.cook_common(kind, mode, start, end, prefix_len, postfix_len, |src, mode, callback| {
-            unescape_unicode(src, mode, &mut |span, result| callback(span, result.map(drop)))
-        })
-    }
-
-    fn cook_mixed(
-        &self,
-        kind: token::LitKind,
-        mode: Mode,
-        start: BytePos,
-        end: BytePos,
-        prefix_len: u32,
-        postfix_len: u32,
-    ) -> (token::LitKind, Symbol) {
-        self.cook_common(kind, mode, start, end, prefix_len, postfix_len, |src, mode, callback| {
-            unescape_mixed(src, mode, &mut |span, result| callback(span, result.map(drop)))
-        })
-    }
 }
 
 pub fn nfc_normalize(string: &str) -> Symbol {
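
For readers unfamiliar with the new helper: the patch funnels every quoted literal through a single `cook_quoted` path built on `unescape_for_errors`. The standalone sketch below (not part of the patch) illustrates the callback shape that function expects, going only by the usage visible in the diff; `check_str_literal` is a hypothetical helper name used purely for illustration.

use std::ops::Range;

use rustc_literal_escaper::{EscapeError, Mode, unescape_for_errors};

// Hypothetical helper: report the first escape error in the body of a plain
// string literal (quotes already stripped), mirroring how `cook_quoted` maps
// the reported byte range back to a source span.
fn check_str_literal(content: &str) -> Result<(), (Range<usize>, EscapeError)> {
    let mut first_err = None;
    // Judging by the usage above, the callback fires only for invalid escapes;
    // valid ones are not reported.
    unescape_for_errors(content, Mode::Str, |range, err| {
        if first_err.is_none() {
            first_err = Some((range, err));
        }
    });
    first_err.map_or(Ok(()), Err)
}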