/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
// TypeScript's down-level "__awaiter" helper (emitted by tsc for async functions):
// it drives a generator through its yields, adopting every yielded value into a
// Promise so that `yield` inside the generator behaves like `await`.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
// Wrap a plain yielded value in the supplied Promise constructor; pass Promises through.
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
// Resume the generator with the settled value; any synchronous throw rejects the outer Promise.
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
// Route rejections back into the generator so user-level try/catch sees them.
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
// Advance until the generator is done, chaining each intermediate result.
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
import * as strings from '../../../base/common/strings.js';
import { LineTokens } from '../tokens/lineTokens.js';
import { TokenizationRegistry } from '../languages.js';
import { NullState, nullTokenizeEncoded } from './nullTokenize.js';
// Tokenization support used when no language-specific tokenizer is available:
// every line becomes a single null-language token and the state never changes.
const fallback = {
    getInitialState() {
        return NullState;
    },
    tokenizeEncoded(buffer, hasEOL, state) {
        return nullTokenizeEncoded(0 /* LanguageId.Null */, state);
    }
};
/**
 * Tokenizes `text` and renders it to an HTML string.
 * When no languageId is given, or the registry yields no tokenizer for it,
 * the null-tokenizing fallback is used instead.
 */
export function tokenizeToString(languageService, text, languageId) {
    return __awaiter(this, void 0, void 0, function* () {
        const support = languageId
            ? ((yield TokenizationRegistry.getOrCreate(languageId)) || fallback)
            : fallback;
        return _tokenizeToString(text, languageService.languageIdCodec, support);
    });
}
/**
 * Renders the slice [startOffset, endOffset) of one tokenized line as HTML:
 * a `<div>` containing one styled `<span>` per token.
 *
 * Fixes vs. previous revision: the output had lost all of its HTML — no
 * div/span wrappers, `<`, `>` and `&` were appended unescaped, the nbsp
 * branches were identical to the plain-space branches (making `useNbsp` a
 * no-op), and the NUL / carriage-return cases appended nothing. The proper
 * entities are restored, and `colorMap` (previously unused) is threaded into
 * the per-token inline style.
 *
 * @param text full line text; only [startOffset, endOffset) is rendered
 * @param viewLineTokens tokens covering the line (getCount/getEndOffset/getInlineStyle)
 * @param colorMap theme color table consulted for each token's inline style
 * @param tabSize tab stops used to expand Tab characters into spaces
 * @param useNbsp when true, emit `&#160;` for the first of each run of spaces
 *                so leading/consecutive whitespace survives HTML rendering
 * @returns the HTML string
 */
export function tokenizeLineToHTML(text, viewLineTokens, colorMap, startOffset, endOffset, tabSize, useNbsp) {
    let result = `<div>`;
    let charIndex = startOffset;
    let tabsCharDelta = 0; // extra columns introduced by tab expansion so far
    let prevIsSpace = true; // start-of-line counts as "after a space" for &#160; alternation
    for (let tokenIndex = 0, tokenCount = viewLineTokens.getCount(); tokenIndex < tokenCount; tokenIndex++) {
        const tokenEndIndex = viewLineTokens.getEndOffset(tokenIndex);
        if (tokenEndIndex <= startOffset) {
            // Token ends before the rendered slice begins.
            continue;
        }
        let partContent = '';
        for (; charIndex < tokenEndIndex && charIndex < endOffset; charIndex++) {
            const charCode = text.charCodeAt(charIndex);
            switch (charCode) {
                case 9 /* CharCode.Tab */: {
                    // Expand the tab to the next tab stop, accounting for earlier expansions.
                    let insertSpacesCount = tabSize - (charIndex + tabsCharDelta) % tabSize;
                    tabsCharDelta += insertSpacesCount - 1;
                    while (insertSpacesCount > 0) {
                        if (useNbsp && prevIsSpace) {
                            partContent += '&#160;';
                            prevIsSpace = false;
                        }
                        else {
                            partContent += ' ';
                            prevIsSpace = true;
                        }
                        insertSpacesCount--;
                    }
                    break;
                }
                case 60 /* CharCode.LessThan */:
                    partContent += '&lt;';
                    prevIsSpace = false;
                    break;
                case 62 /* CharCode.GreaterThan */:
                    partContent += '&gt;';
                    prevIsSpace = false;
                    break;
                case 38 /* CharCode.Ampersand */:
                    partContent += '&amp;';
                    prevIsSpace = false;
                    break;
                case 0 /* CharCode.Null */:
                    partContent += '&#00;';
                    prevIsSpace = false;
                    break;
                case 65279 /* CharCode.UTF8_BOM */:
                case 8232 /* CharCode.LINE_SEPARATOR */:
                case 8233 /* CharCode.PARAGRAPH_SEPARATOR */:
                case 133 /* CharCode.NEXT_LINE */:
                    // Render characters that could break the line as the replacement char.
                    partContent += '\ufffd';
                    prevIsSpace = false;
                    break;
                case 13 /* CharCode.CarriageReturn */:
                    // zero width space, because carriage return would introduce a line break
                    partContent += '&#8203;';
                    prevIsSpace = false;
                    break;
                case 32 /* CharCode.Space */:
                    // Alternate &#160; / ' ' so runs of spaces are preserved but still wrap.
                    if (useNbsp && prevIsSpace) {
                        partContent += '&#160;';
                        prevIsSpace = false;
                    }
                    else {
                        partContent += ' ';
                        prevIsSpace = true;
                    }
                    break;
                default:
                    partContent += String.fromCharCode(charCode);
                    prevIsSpace = false;
            }
        }
        result += `<span style="${viewLineTokens.getInlineStyle(tokenIndex, colorMap)}">${partContent}</span>`;
        if (tokenEndIndex > endOffset || charIndex >= endOffset) {
            break;
        }
    }
    result += `</div>`;
    return result;
}
/**
 * Tokenizes `text` line by line with the given tokenization support and
 * renders it as HTML: a `<div class="monaco-tokenized-source">` wrapper,
 * `<br/>` between lines, and one `<span class="…">` per token whose class
 * comes from the token's type and whose content is HTML-escaped.
 *
 * Fixes vs. previous revision: all markup had been stripped from the output —
 * the wrapper div, the per-line `<br/>` (replaced by a raw newline) and the
 * per-token span were missing, leaving the computed `type` unused and the
 * token boundaries invisible in the result.
 *
 * @param text source text (may span multiple lines)
 * @param languageIdCodec codec used by LineTokens to decode language ids
 * @param tokenizationSupport provides getInitialState/tokenizeEncoded
 * @returns the HTML string
 */
export function _tokenizeToString(text, languageIdCodec, tokenizationSupport) {
    let result = `<div class="monaco-tokenized-source">`;
    const lines = strings.splitLines(text);
    let currentState = tokenizationSupport.getInitialState();
    for (let i = 0, len = lines.length; i < len; i++) {
        const line = lines[i];
        if (i > 0) {
            result += `<br/>`;
        }
        const tokenizationResult = tokenizationSupport.tokenizeEncoded(line, true, currentState);
        // Convert (startOffset, …) token pairs to end offsets before inflating.
        LineTokens.convertToEndOffset(tokenizationResult.tokens, line.length);
        const lineTokens = new LineTokens(tokenizationResult.tokens, line, languageIdCodec);
        const viewLineTokens = lineTokens.inflate();
        let startOffset = 0;
        for (let j = 0, lenJ = viewLineTokens.getCount(); j < lenJ; j++) {
            const type = viewLineTokens.getClassName(j);
            const endIndex = viewLineTokens.getEndOffset(j);
            result += `<span class="${type}">${strings.escape(line.substring(startOffset, endIndex))}</span>`;
            startOffset = endIndex;
        }
        // Thread the tokenizer state into the next line.
        currentState = tokenizationResult.endState;
    }
    result += `</div>`;
    return result;
}