Commit 12994a0

Auto merge of #143766 - matthiaskrgr:rollup-0x7t69s, r=matthiaskrgr

Rollup of 8 pull requests

Successful merges:

- rust-lang/rust#142391 (rust: library: Add `setsid` method to `CommandExt` trait)
- rust-lang/rust#143302 (`tests/ui`: A New Order [27/N])
- rust-lang/rust#143303 (`tests/ui`: A New Order [28/28] FINAL PART)
- rust-lang/rust#143568 (std: sys: net: uefi: tcp4: Add timeout support)
- rust-lang/rust#143611 (Mention more APIs in `ParseIntError` docs)
- rust-lang/rust#143661 (chore: Improve how the other suggestions message gets rendered)
- rust-lang/rust#143708 (fix: Include frontmatter in -Zunpretty output)
- rust-lang/rust#143718 (Make UB transmutes really UB in LLVM)

r? `@ghost`
`@rustbot` modify labels: rollup
try-job: i686-gnu-nopt-1
try-job: test-various
2 parents f934bcf + ee0ad71 commit 12994a0

File tree

3 files changed: +8, -6 lines


crates/parser/src/lexed_str.rs

Lines changed: 6 additions & 4 deletions
@@ -11,8 +11,8 @@
 use std::ops;

 use rustc_literal_escaper::{
-    EscapeError, Mode, unescape_byte, unescape_byte_str, unescape_c_str, unescape_char,
-    unescape_str,
+    unescape_byte, unescape_byte_str, unescape_c_str, unescape_char, unescape_str, EscapeError,
+    Mode,
 };

 use crate::{
@@ -44,7 +44,9 @@ impl<'a> LexedStr<'a> {

         // Re-create the tokenizer from scratch every token because `GuardedStrPrefix` is one token in the lexer
         // but we want to split it to two in edition <2024.
-        while let Some(token) = rustc_lexer::tokenize(&text[conv.offset..]).next() {
+        while let Some(token) =
+            rustc_lexer::tokenize(&text[conv.offset..], rustc_lexer::FrontmatterAllowed::No).next()
+        {
             let token_text = &text[conv.offset..][..token.len as usize];

             conv.extend_token(&token.kind, token_text);
@@ -58,7 +60,7 @@ impl<'a> LexedStr<'a> {
             return None;
         }

-        let token = rustc_lexer::tokenize(text).next()?;
+        let token = rustc_lexer::tokenize(text, rustc_lexer::FrontmatterAllowed::No).next()?;
         if token.len as usize != text.len() {
             return None;
         }
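
For readers tracking the API change: `rustc_lexer::tokenize` now takes a `FrontmatterAllowed` value as a second argument, and every call site in this commit passes `FrontmatterAllowed::No`. Below is a minimal stand-alone sketch of the new call shape, assuming only what the diff shows (`tokenize(&str, FrontmatterAllowed)` returning an iterator of `Token { kind, len }`); `lex_all` is a hypothetical helper, not code from this commit:

use rustc_lexer::{tokenize, FrontmatterAllowed};

// Hypothetical helper: walk a source string token by token, using
// `token.len` to slice out each token's text, as the loop above does.
fn lex_all(text: &str) {
    let mut offset = 0usize;
    // `FrontmatterAllowed::No` presumably tells the lexer not to lex a
    // leading `---` frontmatter block as a single token, leaving any
    // such splitting to the caller.
    for token in tokenize(text, FrontmatterAllowed::No) {
        let end = offset + token.len as usize;
        println!("{:?} {:?}", token.kind, &text[offset..end]);
        offset = end;
    }
}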

crates/proc-macro-srv/src/server_impl.rs

Lines changed: 1 addition & 1 deletion
@@ -121,7 +121,7 @@ pub(super) fn literal_from_str<Span: Copy>(
     use proc_macro::bridge::LitKind;
     use rustc_lexer::{LiteralKind, Token, TokenKind};

-    let mut tokens = rustc_lexer::tokenize(s);
+    let mut tokens = rustc_lexer::tokenize(s, rustc_lexer::FrontmatterAllowed::No);
     let minus_or_lit = tokens.next().unwrap_or(Token { kind: TokenKind::Eof, len: 0 });

     let lit = if minus_or_lit.kind == TokenKind::Minus {
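
Context for the hunk above: the lexer never emits negative literals, so `literal_from_str` pulls one token first and checks whether it is a `Minus` before lexing the literal itself. Here is a sketch of that pattern, using only `rustc_lexer` items visible in the diff; `starts_with_negative_literal` is a hypothetical helper, not part of this commit:

use rustc_lexer::{tokenize, FrontmatterAllowed, TokenKind};

// Hypothetical helper: `-42` lexes as a Minus token followed by a
// literal token, so callers wanting negative literals must reassemble
// them, which is what the code above prepares for.
fn starts_with_negative_literal(s: &str) -> bool {
    let mut tokens = tokenize(s, FrontmatterAllowed::No);
    match tokens.next().map(|t| t.kind) {
        Some(TokenKind::Minus) => {
            matches!(tokens.next().map(|t| t.kind), Some(TokenKind::Literal { .. }))
        }
        _ => false,
    }
}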

crates/tt/src/lib.rs

Lines changed: 1 addition & 1 deletion
@@ -579,7 +579,7 @@ where
 {
     use rustc_lexer::LiteralKind;

-    let token = rustc_lexer::tokenize(text).next_tuple();
+    let token = rustc_lexer::tokenize(text, rustc_lexer::FrontmatterAllowed::No).next_tuple();
     let Some((rustc_lexer::Token {
         kind: rustc_lexer::TokenKind::Literal { kind, suffix_start },
         ..
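
`next_tuple` in this hunk is presumably itertools' `Itertools::next_tuple`, which collects the next N items of an iterator into an N-tuple and returns `None` when the iterator runs short; with a 1-tuple, as here, it behaves like `next()` in a directly destructurable form. A sketch under that assumption; `first_literal` is a hypothetical helper, not part of this commit:

use itertools::Itertools;
use rustc_lexer::{tokenize, FrontmatterAllowed, Token, TokenKind};

// Hypothetical helper: return the first token of `text` iff it is a
// literal, mirroring the `let Some((Token { .. },))` destructuring above.
fn first_literal(text: &str) -> Option<Token> {
    // Yields Some((t,)) when at least one token remains, else None.
    let (token,) = tokenize(text, FrontmatterAllowed::No).next_tuple()?;
    matches!(&token.kind, TokenKind::Literal { .. }).then_some(token)
}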
