1 change: 1 addition & 0 deletions packages/cxx-frontend/src/TokenKind.ts
@@ -33,6 +33,7 @@ export enum TokenKind {
UTF8_STRING_LITERAL,
WIDE_STRING_LITERAL,
PP_INTERNAL_VARIABLE,
CODE_COMPLETION,
AMP_AMP,
AMP_EQUAL,
AMP,
1 change: 1 addition & 0 deletions packages/cxx-gen-ast/src/tokens.ts
@@ -33,6 +33,7 @@ export const BASE_TOKENS: string[] = [
"UTF8_STRING_LITERAL",
"WIDE_STRING_LITERAL",
"PP_INTERNAL_VARIABLE",
"CODE_COMPLETION",
];

export const OPERATORS: Array<[kind: string, spelling: string]> = [
2 changes: 1 addition & 1 deletion src/frontend/CMakeLists.txt
@@ -21,7 +21,7 @@ aux_source_directory(cxx SOURCES)

add_executable(cxx ${SOURCES})

target_link_libraries(cxx PRIVATE cxx-parser cxx-lsp)
target_link_libraries(cxx PRIVATE cxx-lsp)

if(EMSCRIPTEN)
target_link_options(cxx PRIVATE
74 changes: 57 additions & 17 deletions src/parser/cxx/preprocessor.cc
@@ -637,24 +637,37 @@ struct SourceFile {
initLineMap();
}

void getTokenStartPosition(unsigned offset, unsigned *line, unsigned *column,
std::string_view *fileName) const {
[[nodiscard]] auto getTokenStartPosition(unsigned offset) const
-> SourcePosition {
auto it = std::lower_bound(lines.cbegin(), lines.cend(),
static_cast<int>(offset));
if (*it != static_cast<int>(offset)) --it;

assert(*it <= int(offset));

if (line) *line = int(std::distance(cbegin(lines), it) + 1);
auto line = std::uint32_t(std::distance(cbegin(lines), it) + 1);

if (column) {
const auto start = cbegin(source) + *it;
const auto end = cbegin(source) + offset;
const auto start = cbegin(source) + *it;
const auto end = cbegin(source) + offset;

*column = utf8::unchecked::distance(start, end) + 1;
}
const auto column =
std::uint32_t(utf8::unchecked::distance(start, end) + 1);

if (fileName) *fileName = this->fileName;
return SourcePosition{fileName, line, column};
}

[[nodiscard]] auto offsetAt(std::uint32_t line, std::uint32_t column) const
-> std::uint32_t {
if (line == 0 && column == 0) return 0;
if (line > lines.size()) return static_cast<std::uint32_t>(source.size());
const auto start = source.data();
const auto end = start + source.size();
const auto offsetOfTheLine = lines[line - 1];
auto it = start + offsetOfTheLine;
for (std::uint32_t i = 1; i < column; ++i) {
utf8::unchecked::next(it);
}
return static_cast<std::uint32_t>(it - start);
}

private:
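
For context, `lines` in SourceFile stores the byte offset at which each source line begins, and columns are counted in UTF-8 code points rather than bytes. Below is a minimal standalone sketch of the same line/column-to-offset mapping performed by the new SourceFile::offsetAt; the `lineStarts` table and the function shown here are hypothetical, for illustration only, and are not part of this diff.

#include <cstdint>
#include <string>
#include <vector>

// Sketch of the mapping done by SourceFile::offsetAt: lineStarts[i] is assumed
// to hold the byte offset where line i + 1 starts (lines and columns are
// 1-based), and each column advances by one UTF-8 code point.
auto offsetAt(const std::string& source,
              const std::vector<std::uint32_t>& lineStarts,
              std::uint32_t line, std::uint32_t column) -> std::uint32_t {
  if (line == 0) return 0;
  if (line > lineStarts.size()) return static_cast<std::uint32_t>(source.size());
  std::uint32_t offset = lineStarts[line - 1];
  for (std::uint32_t c = 1; c < column && offset < source.size(); ++c) {
    const auto lead = static_cast<unsigned char>(source[offset]);
    // advance by the length of the current UTF-8 sequence, derived from its lead byte
    offset += lead < 0x80 ? 1 : lead < 0xE0 ? 2 : lead < 0xF0 ? 3 : 4;
  }
  return offset;
}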
@@ -713,6 +726,8 @@ struct Preprocessor::Private {
};
std::vector<Dep> dependencies_;
std::function<auto()->std::optional<PreprocessingState>> continuation_;
std::optional<SourcePosition> codeCompletionLocation_;
std::uint32_t codeCompletionOffset_ = 0;
int localCount_ = 0;

int counter_ = 0;
@@ -1593,6 +1608,18 @@ void Preprocessor::Private::finalizeToken(std::vector<Token> &tokens,
const auto fileId = tk->sourceFile;
TokenValue value{};

if (tk->sourceFile == 1 && codeCompletionLocation_.has_value()) {
if (codeCompletionOffset_ < tk->offset ||
(codeCompletionOffset_ >= tk->offset &&
codeCompletionOffset_ < tk->offset + tk->length)) {
auto &completionToken =
tokens.emplace_back(TokenKind::T_CODE_COMPLETION, tk->offset, 0);
completionToken.setFileId(fileId);

codeCompletionLocation_ = std::nullopt;
}
}

switch (tk->kind) {
case TokenKind::T_IDENTIFIER: {
kind = Lexer::classifyKeyword(tk->text);
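
A note on the insertion condition above: it fires for the first main-file token (sourceFile == 1) whose end lies past the requested offset, since `codeCompletionOffset_ < tk->offset` and the bracketed range test together collapse to a single comparison against the token's end. A tiny isolated sketch of that predicate, with hypothetical names, not taken from the diff:

#include <cstdint>

// True when the completion marker belongs in front of the token occupying
// [tokenOffset, tokenOffset + tokenLength): either the requested offset comes
// before the token or falls inside it, i.e. it is simply less than the token's end.
auto shouldInjectCompletionBefore(std::uint32_t completionOffset,
                                  std::uint32_t tokenOffset,
                                  std::uint32_t tokenLength) -> bool {
  return completionOffset < tokenOffset + tokenLength;
}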
@@ -2987,6 +3014,10 @@ void Preprocessor::beginPreprocessing(std::string source, std::string fileName,
if (tokens.empty()) {
tokens.emplace_back(TokenKind::T_ERROR);
}

if (auto loc = d->codeCompletionLocation_) {
d->codeCompletionOffset_ = sourceFile->offsetAt(loc->line, loc->column);
}
}

void Preprocessor::endPreprocessing(std::vector<Token> &tokens) {
@@ -3002,6 +3033,16 @@ void Preprocessor::endPreprocessing(std::vector<Token> &tokens) {

// place the EOF token at the end of the main source file
const auto offset = d->sourceFiles_[mainSourceFileId - 1]->source.size();

if (d->codeCompletionLocation_.has_value()) {
auto sourceFile = d->sourceFiles_[0].get();

auto &tk = tokens.emplace_back(TokenKind::T_CODE_COMPLETION, offset, 0);
tk.setFileId(mainSourceFileId);

d->codeCompletionLocation_ = std::nullopt;
}

auto &tk = tokens.emplace_back(TokenKind::T_EOF_SYMBOL, offset);
tk.setFileId(mainSourceFileId);
}
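
If the requested location was never consumed while finalizing main-file tokens, for example because it points into trailing whitespace, the marker is appended here with the end-of-source offset, immediately before T_EOF_SYMBOL, so a consumer still finds at most one completion token per run. A hedged consumer-side sketch of that scan; the include path and the Token::kind() accessor are assumptions about the surrounding library, not shown in this diff:

#include <cxx/token.h>  // assumed header exposing cxx::Token and cxx::TokenKind
#include <cstddef>
#include <optional>
#include <vector>

// Find the injected completion marker, if any. After this change the stream
// contains at most one T_CODE_COMPLETION token: either in front of the token
// covering the requested position or, via the fallback above, just before
// T_EOF_SYMBOL.
auto findCompletionIndex(const std::vector<cxx::Token>& tokens)
    -> std::optional<std::size_t> {
  for (std::size_t i = 0; i < tokens.size(); ++i) {
    if (tokens[i].kind() == cxx::TokenKind::T_CODE_COMPLETION) return i;
  }
  return std::nullopt;
}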
@@ -3165,10 +3206,7 @@ auto Preprocessor::tokenStartPosition(const Token &token) const
}

auto &sourceFile = *d->sourceFiles_[token.fileId() - 1];
SourcePosition pos;
sourceFile.getTokenStartPosition(token.offset(), &pos.line, &pos.column,
&pos.fileName);
return pos;
return sourceFile.getTokenStartPosition(token.offset());
}

auto Preprocessor::tokenEndPosition(const Token &token) const
@@ -3179,10 +3217,7 @@ auto Preprocessor::tokenEndPosition(const Token &token) const

auto &sourceFile = *d->sourceFiles_[token.fileId() - 1];

SourcePosition pos;
sourceFile.getTokenStartPosition(token.offset() + token.length(), &pos.line,
&pos.column, &pos.fileName);
return pos;
return sourceFile.getTokenStartPosition(token.offset() + token.length());
}

auto Preprocessor::getTextLine(const Token &token) const -> std::string_view {
@@ -3215,6 +3250,11 @@ auto Preprocessor::resolve(const Include &include, bool isIncludeNext) const
return d->resolve(include, isIncludeNext);
}

void Preprocessor::requestCodeCompletionAt(std::uint32_t line,
std::uint32_t column) {
d->codeCompletionLocation_ = SourcePosition{{}, line, column};
}

void DefaultPreprocessorState::operator()(const ProcessingComplete &) {
done = true;
}
2 changes: 2 additions & 0 deletions src/parser/cxx/preprocessor.h
@@ -115,6 +115,8 @@ class Preprocessor {
[[nodiscard]] auto resolve(const Include &include, bool isIncludeNext) const
-> std::optional<std::string>;

void requestCodeCompletionAt(std::uint32_t line, std::uint32_t column);

void squeeze();

private:
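
Since beginPreprocessing converts the stored location into a byte offset, the request has to be registered before preprocessing runs. A hypothetical call-site sketch; the namespace, include paths, driver call, and function name are assumptions for illustration, while requestCodeCompletionAt and T_CODE_COMPLETION are the pieces added by this change:

#include <cxx/preprocessor.h>  // assumed include path for cxx::Preprocessor
#include <cxx/token.h>         // assumed include path for cxx::Token
#include <cstdint>
#include <vector>

// Sketch only: record the completion position, then run whatever preprocessing
// driver the caller already uses; the preprocessor injects a zero-length
// T_CODE_COMPLETION token at (or, as a fallback, after) the requested position.
void tokenizeForCompletion(cxx::Preprocessor& pp, std::uint32_t line,
                           std::uint32_t column,
                           std::vector<cxx::Token>& tokens) {
  pp.requestCodeCompletionAt(line, column);  // must precede preprocessing
  // runPreprocessing(pp, tokens);           // hypothetical driver call
  // `tokens` now carries the marker that the parser/LSP layer can react to.
}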
3 changes: 2 additions & 1 deletion src/parser/cxx/token_fwd.h
@@ -43,7 +43,8 @@ class Token;
V(UTF32_STRING_LITERAL, "<utf32_string_literal>") \
V(UTF8_STRING_LITERAL, "<utf8_string_literal>") \
V(WIDE_STRING_LITERAL, "<wide_string_literal>") \
V(PP_INTERNAL_VARIABLE, "<PP_INTERNAL_VARIABLE>")
V(PP_INTERNAL_VARIABLE, "<pp_internal_variable>") \
V(CODE_COMPLETION, "<code_completion>")

#define FOR_EACH_OPERATOR(V) \
V(AMP_AMP, "&&") \
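
For readers new to this file: the V(NAME, SPELLING) entries form an X-macro list that is expanded elsewhere into the TokenKind enumerators (with a T_ prefix, which is why preprocessor.cc writes TokenKind::T_CODE_COMPLETION) and into their display spellings. A reduced sketch of the pattern with abridged, hypothetical names, not the project's exact definitions:

// Reduced X-macro sketch: each V(name, spelling) entry expands once into an
// enumerator and once into a spelling-table entry.
#define FOR_EACH_DEMO_TOKEN(V)            \
  V(EOF_SYMBOL, "<eof>")                  \
  V(CODE_COMPLETION, "<code_completion>")

enum class DemoTokenKind {
#define DEMO_TOKEN_ENUM(name, spelling) T_##name,
  FOR_EACH_DEMO_TOKEN(DEMO_TOKEN_ENUM)
#undef DEMO_TOKEN_ENUM
};

static constexpr const char* kDemoSpellings[] = {
#define DEMO_TOKEN_SPELLING(name, spelling) spelling,
    FOR_EACH_DEMO_TOKEN(DEMO_TOKEN_SPELLING)
#undef DEMO_TOKEN_SPELLING
};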