Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 13 additions & 3 deletions editor/src/index.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import { FileFormat, Message, MessageType } from "../../shared";
import { getSemanticColors } from "./semanticColors";
import { defaultToMarkdown, markdown, ThemeStyle, WaterproofEditor, WaterproofEditorConfig } from "@impermeable/waterproof-editor";
// TODO: The tactics completions are static, we want them to be dynamic (LSP supplied and/or configurable when the editor is running)
import waterproofTactics from "../../completions/tactics.json";
Expand Down Expand Up @@ -73,8 +74,6 @@ function createConfiguration(format: FileFormat, codeAPI: VSCodeAPI) {
tagConfiguration: tagConfigurationLean,
serializer: new LeanSerializer(),
languageConfig: {
highlightDark: langVerbose.highlight_dark,
highlightLight: langVerbose.highlight_light,
languageSupport: langVerbose.verbose(),
},
}
Expand Down Expand Up @@ -147,6 +146,10 @@ window.onload = () => {
default: throw Error("Invalid theme encountered");
}
})();
for (const [key, value] of Object.entries(getSemanticColors(themeStyle))) {
document.documentElement.style.setProperty(key, value);
}

const editor = new WaterproofEditor(editorElement, cfg, themeStyle);

//@ts-expect-error For now, expose editor in the window. Allows for calling editorInstance methods via the debug console.
Expand Down Expand Up @@ -237,8 +240,15 @@ window.onload = () => {
}
break;
}
case MessageType.themeUpdate:
case MessageType.themeUpdate: {
editor.updateNodeViewThemes(msg.body.theme);
for (const [key, value] of Object.entries(getSemanticColors(msg.body.theme))) {
document.documentElement.style.setProperty(key, value);
}
break;
}
case MessageType.semanticTokens:
editor.setSemanticTokens(msg.body.tokens);
break;
default:
// If we reach this 'default' case, then we have encountered an unknown message type.
Expand Down
29 changes: 29 additions & 0 deletions editor/src/semanticColors.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
import { ThemeStyle } from "@impermeable/waterproof-editor";

/** Maps CSS custom-property names (e.g. `--wp-semanticKeyword`) to hex colors. */
export type SemanticColorMap = Record<string, string>;

// Palette used when the editor runs under a dark VS Code theme.
const DARK_PALETTE: SemanticColorMap = {
    "--wp-semanticKeyword": "#7aa2f7",
    "--wp-semanticFunction": "#e0af68",
    "--wp-semanticType": "#2ac3de",
    "--wp-semanticVariable": "#e8c574",
    "--wp-semanticProperty": "#73daca",
    "--wp-semanticLiteral": "#9ece6a",
    "--wp-semanticComment": "#565f89",
    "--wp-semanticLeanSorryLike": "#f7768e",
};

// Palette used when the editor runs under a light VS Code theme.
const LIGHT_PALETTE: SemanticColorMap = {
    "--wp-semanticKeyword": "#0033b3",
    "--wp-semanticFunction": "#795e26",
    "--wp-semanticType": "#267f99",
    "--wp-semanticVariable": "#9b6800",
    "--wp-semanticProperty": "#0070c1",
    "--wp-semanticLiteral": "#098658",
    "--wp-semanticComment": "#6a9955",
    "--wp-semanticLeanSorryLike": "#cd3131",
};

/**
 * Returns the semantic-highlighting palette for the given theme.
 * Any theme other than Dark (i.e. Light) receives the light palette.
 */
export function getSemanticColors(theme: ThemeStyle): SemanticColorMap {
    if (theme === ThemeStyle.Dark) {
        return DARK_PALETTE;
    }
    return LIGHT_PALETTE;
}
5 changes: 1 addition & 4 deletions editor/src/vscodemapping.css
Original file line number Diff line number Diff line change
Expand Up @@ -42,9 +42,6 @@
--wp-notificationsForeground: var(--vscode-notifications-foreground);
--wp-quickInputBackground: var(--vscode-quickInput-background);
--wp-quickInputForeground: var(--vscode-quickInput-foreground);
--wp-terminalAnsiGreen: var(--vscode-terminal-ansiGreen);
--wp-terminalAnsiRed: var(--vscode-terminal-ansiRed);
--wp-terminalAnsiYellow: var(--vscode-terminal-ansiYellow);
--wp-textCodeBlockBackground: var(--vscode-textCodeBlock-background);
--wp-textSeparatorForeground: var(--vscode-textSeparator-foreground);
}
}
6 changes: 4 additions & 2 deletions shared/Messages.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { DocChange, WrappingDocChange, InputAreaStatus, HistoryChange, ThemeStyle, OffsetDiagnostic } from "@impermeable/waterproof-editor";
import { DocChange, WrappingDocChange, InputAreaStatus, HistoryChange, ThemeStyle, OffsetDiagnostic, OffsetSemanticToken } from "@impermeable/waterproof-editor";
import { RocqGoalAnswer, HypVisibility, PpString } from "../lib/types";
import { Completion } from "@impermeable/waterproof-editor";
import { ServerStatus } from "./ServerStatus";
Expand Down Expand Up @@ -53,6 +53,7 @@ export type Message =
// The payload is forwarded to an InfoView instance, so its type does not concern us
// eslint-disable-next-line @typescript-eslint/no-explicit-any
| MessageBase<MessageType.infoviewRpc, { payload: any }>
| MessageBase<MessageType.semanticTokens, { tokens: Array<OffsetSemanticToken> }>
Copy link

Copilot AI Mar 24, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The semanticTokens message payload does not include a document version, unlike diagnostics/init which are versioned. Because semantic token computation is async, the webview can receive stale tokens after newer edits. Consider adding a version field (e.g., document.version) and having the editor ignore tokens that don't match the current document version.

Suggested change
| MessageBase<MessageType.semanticTokens, { tokens: Array<OffsetSemanticToken> }>
| MessageBase<MessageType.semanticTokens, { tokens: Array<OffsetSemanticToken>, version: number }>

Copilot uses AI. Check for mistakes.
Copy link
Copy Markdown
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Should be very rare in practice and will recover on the next request; I think it's not worth adding that.

;

/**
Expand Down Expand Up @@ -87,5 +88,6 @@ export const enum MessageType {
teacher,
themeUpdate,
viewportHint,
infoviewRpc
infoviewRpc,
semanticTokens
}
131 changes: 122 additions & 9 deletions src/lsp-client/lean/client.ts
Original file line number Diff line number Diff line change
@@ -1,26 +1,28 @@
import { LeanGoalAnswer, LeanGoalRequest } from "../../../lib/types";
import { LspClient } from "../client";
import { EventEmitter, Position, TextDocument, Disposable, Range, OutputChannel, Diagnostic, DiagnosticSeverity } from "vscode";
import { CancellationTokenSource, EventEmitter, Position, TextDocument, Disposable, Range, OutputChannel, Diagnostic, DiagnosticSeverity} from "vscode";
import { VersionedTextDocumentIdentifier } from "vscode-languageserver-types";
import { FileProgressParams } from "../requestTypes";
import { leanFileProgressNotificationType, leanGoalRequestType, LeanPublishDiagnosticsParams } from "./requestTypes";
import { WaterproofConfigHelper, WaterproofLogger as wpl, WaterproofSetting } from "../../helpers";
import { LanguageClientProvider, WpDiagnostic } from "../clientTypes";
import { WebviewManager } from "../../webviewManager";
import { findOccurrences } from "../qedStatus";
import { InputAreaStatus } from "@impermeable/waterproof-editor";
import { InputAreaStatus, OffsetSemanticToken, SemanticTokenType } from "@impermeable/waterproof-editor";
import { ServerStoppedReason } from "@leanprover/infoview-api";
import { DidChangeTextDocumentParams, DidCloseTextDocumentParams } from "vscode-languageclient";
import { DidChangeTextDocumentParams, DidCloseTextDocumentParams, SemanticTokensRegistrationType } from "vscode-languageclient";
import { FileProgressKind, MessageType } from "../../../shared";

export class LeanLspClient extends LspClient<LeanGoalRequest, LeanGoalAnswer> {
language = "lean4";

private semanticTokenTimer?: NodeJS.Timeout | number;

/**
* Whether the Lean server is still processing the document.
* Used to avoid marking a proof as complete before checking has finished.
*/
private isBusy: boolean = true;
private isBusy: boolean = true;

constructor(clientProvider: LanguageClientProvider, channel: OutputChannel) {
super(clientProvider, channel);
Expand Down Expand Up @@ -80,12 +82,15 @@ export class LeanLspClient extends LspClient<LeanGoalRequest, LeanGoalAnswer> {
}

protected async onFileProgress(progress: FileProgressParams) {
if (this.activeDocument?.uri.toString() === progress.textDocument.uri) {
this.requestSemanticTokensDebounced(this.activeDocument);
}

// Call super first so LSP ranges are converted to VSCode Ranges before we store/use them.
super.onFileProgress(progress);

if (this.activeDocument?.uri.toString() === progress.textDocument.uri) {

// --- busy-indicator (Lean edition) ---
// Find the first processing range, where we want to add the busy-indicator to.
const firstProcessing = progress.processing.find(
Expand Down Expand Up @@ -193,16 +198,16 @@ export class LeanLspClient extends LspClient<LeanGoalRequest, LeanGoalAnswer> {
if (hasErrorDiagnostic) {
return InputAreaStatus.Incorrect;
}

const goalsPosition = inputArea.end.translate(0, 0);

const goalsParams = this.createGoalsRequestParameters(document, goalsPosition);
const response = await this.requestGoals(goalsParams);

if (!response) {
return InputAreaStatus.Incorrect;
}

const status = response.goals.length > 0 ? InputAreaStatus.Incorrect : InputAreaStatus.Correct;
return status;
}
Expand Down Expand Up @@ -236,11 +241,119 @@ export class LeanLspClient extends LspClient<LeanGoalRequest, LeanGoalAnswer> {
public clientStopped = this.clientStoppedEmitter.event;

async dispose(timeout?: number): Promise<void> {
if (this.semanticTokenTimer) {
clearTimeout(this.semanticTokenTimer);
}
await super.dispose(timeout);
this.clientStoppedEmitter.fire({message: 'Lean server has stopped', reason: ''});
}

/**
 * Schedules a semantic-token request for `document`, resetting the
 * 300 ms debounce window on every call so that bursts of file-progress
 * notifications collapse into a single request.
 */
private requestSemanticTokensDebounced(document: TextDocument) {
    const previousTimer = this.semanticTokenTimer;
    if (previousTimer) {
        clearTimeout(previousTimer);
    }
    this.semanticTokenTimer = setTimeout(() => {
        // Fire-and-forget: failures are only logged, never surfaced.
        void this.requestAndForwardSemanticTokens(document).catch(err => {
            wpl.debug(`Semantic token request failed: ${err}`);
        });
    }, 300);
}

private async requestAndForwardSemanticTokens(document: TextDocument): Promise<void> {
if (!this.client.isRunning() || !this.webviewManager) return;

const feature = this.client.getFeature(SemanticTokensRegistrationType.method);
if (!feature) return;

const provider = feature.getProvider(document);
if (!provider?.full) return;

const cts = new CancellationTokenSource();
const tokens = await Promise.resolve(
provider.full.provideDocumentSemanticTokens(document, cts.token)
).catch(() => undefined).finally(() => cts.dispose());

if (!tokens?.data?.length) return;

const tokenLegend = this.client.initializeResult?.capabilities.semanticTokensProvider?.legend;
if (!tokenLegend) return;

const offsetTokens: Array<OffsetSemanticToken> = LeanLspClient.decodeLspTokens(tokens.data).flatMap(t => {
const tokenType = LeanLspClient.mapLspTokenType(tokenLegend.tokenTypes[t.tokenTypeIndex]);
if (tokenType === undefined) return [];

const startOffset = document.offsetAt(new Position(t.line, t.char));
return [{
startOffset,
endOffset: startOffset + t.length,
type: tokenType,
}];
});

this.webviewManager.postMessage(document.uri.toString(), {
type: MessageType.semanticTokens,
body: {tokens: offsetTokens},
});
Comment on lines +251 to +296
Copy link

Copilot AI Mar 24, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Semantic token requests can overlap (debounce timer + async provider call), and results may arrive out-of-order; the last-resolving request will overwrite highlights even if it corresponds to an older document version. Consider tracking a monotonically increasing request generation / document.version and discarding stale results (and/or reusing a single CancellationTokenSource that you cancel when scheduling a new request).

Copilot uses AI. Check for mistakes.
Copy link
Copy Markdown
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Should be very rare in practice and will recover on the next request, I think its not worth adding that.

}

/**
 * Decodes the LSP relative ("delta") semantic-token encoding into
 * absolute line/character positions.
 *
 * Each token occupies five consecutive entries of `data`:
 * deltaLine, deltaStartChar, length, tokenType, tokenModifiers.
 * Token modifiers are ignored. A trailing partial group is logged
 * and skipped rather than causing an error.
 */
private static decodeLspTokens(data: Uint32Array): Array<{ line: number; char: number; length: number; tokenTypeIndex: number }> {
    if (data.length % 5 !== 0) {
        wpl.debug(`[SemanticTokens] Malformed token data: length ${data.length} is not a multiple of 5`);
    }
    const decoded: Array<{ line: number; char: number; length: number; tokenTypeIndex: number }> = [];
    let currentLine = 0;
    let currentChar = 0;
    for (let i = 0; i + 5 <= data.length; i += 5) {
        const deltaLine = data[i];
        // Per the LSP spec, the start character is relative to the
        // previous token when on the same line, absolute otherwise.
        if (deltaLine > 0) {
            currentLine += deltaLine;
            currentChar = data[i + 1];
        } else {
            currentChar += data[i + 1];
        }
        decoded.push({
            line: currentLine,
            char: currentChar,
            length: data[i + 2],
            tokenTypeIndex: data[i + 3],
            // data[i + 4] holds tokenModifiers, which we do not use.
        });
    }
    return decoded;
}

/**
 * Translates an LSP semantic-token-type name into the editor's
 * SemanticTokenType, or undefined for type names we do not highlight.
 */
private static mapLspTokenType(lspType: string): SemanticTokenType | undefined {
    // A Map (rather than a plain object) guarantees that unknown names
    // — including prototype keys like "constructor" — yield undefined.
    const mapping = new Map<string, SemanticTokenType>([
        ["keyword", SemanticTokenType.Keyword],
        ["variable", SemanticTokenType.Variable],
        ["property", SemanticTokenType.Property],
        ["function", SemanticTokenType.Function],
        ["namespace", SemanticTokenType.Namespace],
        ["type", SemanticTokenType.Type],
        ["class", SemanticTokenType.Class],
        ["enum", SemanticTokenType.Enum],
        ["interface", SemanticTokenType.Interface],
        ["struct", SemanticTokenType.Struct],
        ["typeParameter", SemanticTokenType.TypeParameter],
        ["parameter", SemanticTokenType.Parameter],
        ["enumMember", SemanticTokenType.EnumMember],
        ["event", SemanticTokenType.Event],
        ["method", SemanticTokenType.Method],
        ["macro", SemanticTokenType.Macro],
        ["modifier", SemanticTokenType.Modifier],
        ["comment", SemanticTokenType.Comment],
        ["string", SemanticTokenType.String],
        ["number", SemanticTokenType.Number],
        ["regexp", SemanticTokenType.Regexp],
        ["operator", SemanticTokenType.Operator],
        ["decorator", SemanticTokenType.Decorator],
        ["leanSorryLike", SemanticTokenType.LeanSorryLike],
    ]);
    return mapping.get(lspType);
}
Comment on lines +299 to +354
Copy link

Copilot AI Mar 24, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

New semantic-token decoding/mapping logic is non-trivial and currently untested. There are already Jest tests for LeanLspClient in tests/lsp-client/lean; consider adding focused unit tests for decodeLspTokens (delta encoding) and mapLspTokenType to prevent regressions.

Copilot uses AI. Check for mistakes.
Copy link
Copy Markdown
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think it's not worth testing, because the decoding is based on the LSP specification and the mapping function is trivial.


/**
 * Marks the server as busy whenever the document changes, so that proof
 * status is not reported as complete before re-checking has finished
 * (see the `isBusy` field documentation).
 */
protected onDocumentChanged(): void {
this.isBusy = true;
}
}
}