Commit

Merge branch 'develop' into main
znorman-harris committed Sep 6, 2023
2 parents 9812c65 + 9ff82db commit 3916d71
Showing 258 changed files with 2,091 additions and 1,104 deletions.
8 changes: 8 additions & 0 deletions CHANGELOG.md
@@ -42,6 +42,14 @@ Here are some of the features that notebooks bring:

- After each cell is executed we issue a `retall` command to make sure that we are at the top-level and not stopped in a weird state

## 3.2.2 September 2023

Major change to the language server and worker threads to implement a cancellation framework. Work is cancelled automatically for PRO files and IDL Notebooks.

This change addresses performance issues where, if code could not be parsed as quickly as you were typing, you would not get auto-complete, outlines, hover help, semantic tokens, formatting on save, etc.

In these cases it could take 15-30 seconds for the language server to respond while it worked through a backlog of processing that was no longer relevant.
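
As an illustration of the idea only (not the actual `@idl/cancellation-tokens` implementation), a cooperative cancellation framework threads a token through long-running work and checks it between units of work; the class and method names below are assumptions for this sketch.

```typescript
/**
 * Minimal sketch of a cooperative cancellation token. Illustrative only;
 * the real @idl/cancellation-tokens API may differ.
 */
class CancellationToken {
  private cancelled = false;

  /** Request cancellation of whatever work holds this token */
  cancel(): void {
    this.cancelled = true;
  }

  /** Long-running work calls this between units of work to bail out early */
  throwIfCancelled(): void {
    if (this.cancelled) {
      throw new Error('Operation cancelled');
    }
  }
}

/** Example: a parse loop that stops as soon as the token is cancelled */
function parseLines(lines: string[], cancel: CancellationToken): string[] {
  const results: string[] = [];
  for (const line of lines) {
    // a newer request can cancel this one, so check before each unit of work
    cancel.throwIfCancelled();
    results.push(line.trim());
  }
  return results;
}
```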

## 3.2.1 August 2023

Notebook key behavior change: if you are running one or more cells and IDL reports an error for any cell being executed, all pending cells are cleared and not executed. This ensures that, if later cells depend on earlier ones, you don't get cascading failures. A minimal sketch of that queue-clearing behavior follows.
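
The sketch below assumes a simple in-memory list of pending cells; the `runCell` callback and `pendingCells` array are hypothetical, not the extension's actual internals.

```typescript
// Hypothetical sketch: execute queued notebook cells in order and clear the
// queue on the first failure so dependent cells never run against bad state.
async function runPendingCells(
  pendingCells: string[],
  runCell: (code: string) => Promise<boolean> // resolves false if IDL reports an error
): Promise<void> {
  while (pendingCells.length > 0) {
    const cell = pendingCells.shift()!;
    const ok = await runCell(cell);
    if (!ok) {
      // an error in this cell invalidates everything queued after it
      pendingCells.length = 0;
      return;
    }
  }
}
```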
219 changes: 120 additions & 99 deletions apps/parsing-worker/src/main.ts
@@ -98,9 +98,9 @@ client.on(LSP_WORKER_THREAD_MESSAGE_LOOKUP.TRACK_GLOBAL, async (message) => {
*/
client.on(
LSP_WORKER_THREAD_MESSAGE_LOOKUP.CHANGE_DETECTION,
async (message) => {
async (message, cancel) => {
// run change detection!
const changed = ChangeDetection(WORKER_INDEX, message.changed);
const changed = ChangeDetection(WORKER_INDEX, cancel, message.changed);

// get syntax problems
const problems = WORKER_INDEX.getSyntaxProblems();
@@ -171,45 +171,51 @@ client.on(LSP_WORKER_THREAD_MESSAGE_LOOKUP.GET_TOKEN_DEF, async (message) => {
/**
* Handle requests to parse and post process a file
*/
client.on(LSP_WORKER_THREAD_MESSAGE_LOOKUP.PARSE_FILE, async (message) => {
// index the file
const parsed = await WORKER_INDEX.getParsedProCode(
message.file,
WORKER_INDEX.getFileStrings(message.file),
message
);

// make non-circular
RemoveScopeDetail(parsed);

// return
return parsed;
});
client.on(
LSP_WORKER_THREAD_MESSAGE_LOOKUP.PARSE_FILE,
async (message, cancel) => {
// index the file
const parsed = await WORKER_INDEX.getParsedProCode(
message.file,
WORKER_INDEX.getFileStrings(message.file),
message
);

// make non-circular
RemoveScopeDetail(parsed, cancel);

// return
return parsed;
}
);

/**
* Handle requests to parse and post process code for a file
*/
client.on(LSP_WORKER_THREAD_MESSAGE_LOOKUP.PARSE_CODE, async (message) => {
// index the file
const parsed = await WORKER_INDEX.getParsedProCode(
message.file,
message.code,
message
);

// make non-circular
RemoveScopeDetail(parsed);

// return
return parsed;
});
client.on(
LSP_WORKER_THREAD_MESSAGE_LOOKUP.PARSE_CODE,
async (message, cancel) => {
// index the file
const parsed = await WORKER_INDEX.getParsedProCode(
message.file,
message.code,
message
);

// make non-circular
RemoveScopeDetail(parsed, cancel);

// return
return parsed;
}
);

/**
* Parse files quickly to get the basic overview and thats it
*/
client.on(
LSP_WORKER_THREAD_MESSAGE_LOOKUP.PARSE_FILES_FAST,
async (message) => {
async (message, cancel) => {
/** Get files to process */
const files = message.files;

@@ -233,7 +239,7 @@ client.on(
/**
* Parse our file
*/
const parsed = ParseFileSync(files[i], { full: false });
const parsed = ParseFileSync(files[i], cancel, { full: false });

// track syntax problems
WORKER_INDEX.trackSyntaxProblemsForFile(files[i], parsed.parseProblems);
@@ -325,50 +331,54 @@ client.on(LSP_WORKER_THREAD_MESSAGE_LOOKUP.PARSE_FILES, async (message) => {
/**
* Parse notebooks
*/
client.on(LSP_WORKER_THREAD_MESSAGE_LOOKUP.PARSE_NOTEBOOK, async (message) => {
/**
* Initialize our response
*/
const resp: ParseNotebookResponse = {
lines: 0,
globals: {},
problems: {},
};
client.on(
LSP_WORKER_THREAD_MESSAGE_LOOKUP.PARSE_NOTEBOOK,
async (message, cancel) => {
/**
* Initialize our response
*/
const resp: ParseNotebookResponse = {
lines: 0,
globals: {},
problems: {},
};

/**
* Index our notebook
*/
const byCell = await WORKER_INDEX.indexIDLNotebook(
message.file,
message.notebook
);
/**
* Index our notebook
*/
const byCell = await WORKER_INDEX.getParsedNotebook(
message.file,
message.notebook,
cancel
);

/**
* Get files for cells that we actually processed
*/
const files = Object.keys(byCell);
/**
* Get files for cells that we actually processed
*/
const files = Object.keys(byCell);

// process each cell and save information we need to return
for (let i = 0; i < files.length; i++) {
if (byCell[files[i]] === undefined) {
resp.globals[files[i]] = [];
resp.problems[files[i]] = [];
continue;
// process each cell and save information we need to return
for (let i = 0; i < files.length; i++) {
if (byCell[files[i]] === undefined) {
resp.globals[files[i]] = [];
resp.problems[files[i]] = [];
continue;
}
resp.globals[files[i]] = byCell[files[i]].global;
resp.problems[files[i]] = GetSyntaxProblems(byCell[files[i]]);
}
resp.globals[files[i]] = byCell[files[i]].global;
resp.problems[files[i]] = GetSyntaxProblems(byCell[files[i]]);
}

// return each cell
return resp;
});
// return each cell
return resp;
}
);

/**
* Get notebook cells
*/
client.on(
LSP_WORKER_THREAD_MESSAGE_LOOKUP.GET_NOTEBOOK_CELL,
async (message) => {
async (message, cancel) => {
// get parsed code and return
const parsed = await WORKER_INDEX.getParsedProCode(
message.file,
@@ -377,7 +387,7 @@ client.on(

// make non-circular
if (parsed !== undefined) {
RemoveScopeDetail(parsed);
RemoveScopeDetail(parsed, cancel);
}

return parsed;
@@ -389,14 +399,18 @@
*/
client.on(
LSP_WORKER_THREAD_MESSAGE_LOOKUP.POST_PROCESS_FILES,
async (message) => {
async (message, cancel) => {
/** Get files */
const files = Array.isArray(message.files)
? message.files
: WORKER_INDEX.tokensByFile.allFiles();

// post process, no change detection
const missing = await WORKER_INDEX.postProcessProFiles(files, false);
const missing = await WORKER_INDEX.postProcessProFiles(
files,
cancel,
false
);

// get syntax problems
const problems = WORKER_INDEX.getSyntaxProblems();
@@ -440,46 +454,53 @@ client.on(
* TODO: Correctly perform change detection from removing files instead of processing everything
* we have which is brute force but works
*/
client.on(LSP_WORKER_THREAD_MESSAGE_LOOKUP.REMOVE_FILES, async (message) => {
// remove all files
await WORKER_INDEX.removeWorkspaceFiles(message.files, false);

/** Get files that we manage */
const ourFiles = WORKER_INDEX.tokensByFile.allFiles();
client.on(
LSP_WORKER_THREAD_MESSAGE_LOOKUP.REMOVE_FILES,
async (message, cancel) => {
// remove all files
await WORKER_INDEX.removeWorkspaceFiles(message.files, false);

/** Get files that we manage */
const ourFiles = WORKER_INDEX.tokensByFile.allFiles();

// post process all of our files again
const missing = await WORKER_INDEX.postProcessProFiles(
ourFiles,
cancel,
false
);

// post process all of our files again
const missing = await WORKER_INDEX.postProcessProFiles(ourFiles, false);
// get syntax problems
const problems = WORKER_INDEX.getSyntaxProblems();

// get syntax problems
const problems = WORKER_INDEX.getSyntaxProblems();
// craft our response
const resp: RemoveFilesResponse = {
problems: {},
missing,
};

// craft our response
const resp: RemoveFilesResponse = {
problems: {},
missing,
};
// populate response
for (let i = 0; i < ourFiles.length; i++) {
if (global.gc) {
if (i % IDL_INDEX_OPTIONS.GC_FREQUENCY === 0) {
global.gc();
}
}

// populate response
for (let i = 0; i < ourFiles.length; i++) {
if (global.gc) {
if (i % IDL_INDEX_OPTIONS.GC_FREQUENCY === 0) {
global.gc();
/**
* Skip if we dont have a file. Could happen from parsing errors
*/
if (!WORKER_INDEX.tokensByFile.has(ourFiles[i])) {
continue;
}
}

/**
* Skip if we dont have a file. Could happen from parsing errors
*/
if (!WORKER_INDEX.tokensByFile.has(ourFiles[i])) {
continue;
// populate problems
resp.problems[ourFiles[i]] = problems[ourFiles[i]] || [];
}

// populate problems
resp.problems[ourFiles[i]] = problems[ourFiles[i]] || [];
return resp;
}

return resp;
});
);

/**
* Listen for events from our main thread
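
The recurring change in this file is that each worker message handler now receives a cancellation token alongside its message and forwards it into the parsing calls. Below is a simplified sketch of that shape; the `MessageClient` interface, event name, and `parse` helper are hypothetical stand-ins, not the repository's actual `client.on` API.

```typescript
/** Minimal view of a cancellation token as the handlers use it */
type Cancel = { throwIfCancelled(): void };

/** Hypothetical stand-in for the worker-thread message client */
interface MessageClient {
  on<T, R>(
    event: string,
    handler: (message: T, cancel: Cancel) => Promise<R>
  ): void;
}

/** Illustrative handler: forward the token so parsing can stop early */
function registerParseHandler(
  client: MessageClient,
  parse: (file: string, cancel: Cancel) => Promise<unknown>
): void {
  client.on<{ file: string }, unknown>('parse-file', async (message, cancel) => {
    // if a newer edit supersedes this request, parse() can throw via the
    // token and we simply never produce a stale response
    return parse(message.file, cancel);
  });
}
```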
9 changes: 6 additions & 3 deletions apps/test-tokenizer/src/parse-tests/format-test.ts
@@ -1,4 +1,5 @@
import { Assembler } from '@idl/assembler';
import { CancellationToken } from '@idl/cancellation-tokens';
import { GetTokenNames, Parser } from '@idl/parser';
import { TimeIt } from '@idl/shared';
import { deepEqual } from 'fast-equals';
@@ -63,7 +64,7 @@ export async function FormatTest(folder: string): Promise<void> {
for (let i = 0; i < code.length; i++) {
let canTick = true;
// extract tokens
const tokenized = Parser(code[i]);
const tokenized = Parser(code[i], new CancellationToken());

// validate formatting for code
// track time so we can more accurately represent true parsing time
@@ -73,12 +74,14 @@ export async function FormatTest(folder: string): Promise<void> {
const tokenizedNames = GetTokenNames(tokenized);

// format code
const formatted = Assembler(tokenized, { formatter: 'fiddle' });
const formatted = Assembler(tokenized, new CancellationToken(), {
formatter: 'fiddle',
});

// verify that our tokens are the same as they were before
if (formatted !== undefined) {
// parse formatted code
const reParsed = Parser(formatted);
const reParsed = Parser(formatted, new CancellationToken());

// make sure things equal
if (!deepEqual(GetTokenNames(reParsed), tokenizedNames)) {
3 changes: 2 additions & 1 deletion apps/test-tokenizer/src/parse-tests/routine-popularity.ts
@@ -1,3 +1,4 @@
import { CancellationToken } from '@idl/cancellation-tokens';
import { GLOBAL_TOKEN_TYPES } from '@idl/data-types/core';
import { GetRoutine, IDLIndex } from '@idl/parsing/index';
import { IParsed, TreeRecurserBasic } from '@idl/parsing/syntax-tree';
@@ -63,7 +64,7 @@ ROUTINES[TOKEN_NAMES.CALL_PROCEDURE_METHOD] = true;
* Tracks the usage stats for routines
*/
export function TrackPopularity(index: IDLIndex, parsed: IParsed) {
TreeRecurserBasic(parsed.tree, {
TreeRecurserBasic(parsed.tree, new CancellationToken(), {
onBranchToken: (token) => {
if (token.name in ROUTINES) {
const defs = GetRoutine(index, parsed, token);