From c88756957a1cbf831c2b6fd6b33196e981a66ffd Mon Sep 17 00:00:00 2001 From: Martin Aeschlimann Date: Thu, 30 Jan 2020 15:57:35 +0100 Subject: [PATCH] rename to semanticTokenTypes, semanticTokenModifiers & semanticTokenStyleDefaults --- extensions/vscode-colorize-tests/package.json | 6 +- .../tokenClassificationExtensionPoint.ts | 60 +++++++++---------- 2 files changed, 33 insertions(+), 33 deletions(-) diff --git a/extensions/vscode-colorize-tests/package.json b/extensions/vscode-colorize-tests/package.json index 152a4c91595..07bb7905e52 100644 --- a/extensions/vscode-colorize-tests/package.json +++ b/extensions/vscode-colorize-tests/package.json @@ -26,19 +26,19 @@ "vscode": "1.1.5" }, "contributes": { - "tokenTypes": [ + "semanticTokenTypes": [ { "id": "testToken", "description": "A test token" } ], - "tokenModifiers": [ + "semanticTokenModifiers": [ { "id": "testModifier", "description": "A test modifier" } ], - "tokenStyleDefaults": [ + "semanticTokenStyleDefaults": [ { "selector": "testToken.testModifier", "light": { diff --git a/src/vs/workbench/services/themes/common/tokenClassificationExtensionPoint.ts b/src/vs/workbench/services/themes/common/tokenClassificationExtensionPoint.ts index c208e5bd901..20051b91208 100644 --- a/src/vs/workbench/services/themes/common/tokenClassificationExtensionPoint.ts +++ b/src/vs/workbench/services/themes/common/tokenClassificationExtensionPoint.ts @@ -41,22 +41,22 @@ const colorPattern = '^#([0-9A-Fa-f]{6})([0-9A-Fa-f]{2})?$'; const tokenClassificationRegistry: ITokenClassificationRegistry = getTokenClassificationRegistry(); const tokenTypeExtPoint = ExtensionsRegistry.registerExtensionPoint({ - extensionPoint: 'tokenTypes', + extensionPoint: 'semanticTokenTypes', jsonSchema: { - description: nls.localize('contributes.tokenTypes', 'Contributes semantic token types.'), + description: nls.localize('contributes.semanticTokenTypes', 'Contributes semantic token types.'), type: 'array', items: { type: 'object', properties: { id: 
{ type: 'string', - description: nls.localize('contributes.tokenTypes.id', 'The identifier of the token type'), + description: nls.localize('contributes.semanticTokenTypes.id', 'The identifier of the semantic token type'), pattern: typeAndModifierIdPattern, - patternErrorMessage: nls.localize('contributes.tokenTypes.id.format', 'Identifiers should be in the form letterOrDigit[_-letterOrDigit]*'), + patternErrorMessage: nls.localize('contributes.semanticTokenTypes.id.format', 'Identifiers should be in the form letterOrDigit[_-letterOrDigit]*'), }, description: { type: 'string', - description: nls.localize('contributes.color.description', 'The description of the token type'), + description: nls.localize('contributes.color.description', 'The description of the semantic token type'), } } } @@ -64,21 +64,21 @@ const tokenTypeExtPoint = ExtensionsRegistry.registerExtensionPoint({ - extensionPoint: 'tokenModifiers', + extensionPoint: 'semanticTokenModifiers', jsonSchema: { - description: nls.localize('contributes.tokenModifiers', 'Contributes semantic token modifiers.'), + description: nls.localize('contributes.semanticTokenModifiers', 'Contributes semantic token modifiers.'), type: 'array', items: { type: 'object', properties: { id: { type: 'string', - description: nls.localize('contributes.tokenModifiers.id', 'The identifier of the token modifier'), + description: nls.localize('contributes.semanticTokenModifiers.id', 'The identifier of the semantic token modifier'), pattern: typeAndModifierIdPattern, - patternErrorMessage: nls.localize('contributes.tokenModifiers.id.format', 'Identifiers should be in the form letterOrDigit[_-letterOrDigit]*') + patternErrorMessage: nls.localize('contributes.semanticTokenModifiers.id.format', 'Identifiers should be in the form letterOrDigit[_-letterOrDigit]*') }, description: { - description: nls.localize('contributes.tokenModifiers.description', 'The description of the token modifier') + description: 
nls.localize('contributes.semanticTokenModifiers.description', 'The description of the semantic token modifier') } } } @@ -86,36 +86,36 @@ const tokenModifierExtPoint = ExtensionsRegistry.registerExtensionPoint({ - extensionPoint: 'tokenStyleDefaults', + extensionPoint: 'semanticTokenStyleDefaults', jsonSchema: { - description: nls.localize('contributes.tokenStyleDefaults', 'Contributes semantic token style default.'), + description: nls.localize('contributes.semanticTokenStyleDefaults', 'Contributes semantic token style defaults.'), type: 'array', items: { type: 'object', properties: { selector: { type: 'string', - description: nls.localize('contributes.tokenStyleDefaults.selector', 'The selector matching token types and modifiers.'), + description: nls.localize('contributes.semanticTokenStyleDefaults.selector', 'The selector matching token types and modifiers.'), pattern: selectorPattern, - patternErrorMessage: nls.localize('contributes.tokenStyleDefaults.selector.format', 'Selectors should be in the form (type|*)(.modifier)*'), + patternErrorMessage: nls.localize('contributes.semanticTokenStyleDefaults.selector.format', 'Selectors should be in the form (type|*)(.modifier)*'), }, scopes: { type: 'array', - description: nls.localize('contributes.scopes.light', 'A list of textmate scopes that are matched against the current color theme to find a default style'), + description: nls.localize('contributes.scopes.light', 'A list of TextMate scopes that are matched against the current color theme to find a default style'), items: { type: 'string' } }, light: { - description: nls.localize('contributes.tokenStyleDefaults.light', 'The default style used for light themes'), + description: nls.localize('contributes.semanticTokenStyleDefaults.light', 'The default style used for light themes'), $ref: textmateColorSettingsSchemaId }, dark: { - description: nls.localize('contributes.tokenStyleDefaults.dark', 'The default style used for dark themes'), + description: 
nls.localize('contributes.semanticTokenStyleDefaults.dark', 'The default style used for dark themes'), $ref: textmateColorSettingsSchemaId }, highContrast: { - description: nls.localize('contributes.tokenStyleDefaults.hc', 'The default style used for high contrast themes'), + description: nls.localize('contributes.semanticTokenStyleDefaults.hc', 'The default style used for high contrast themes'), $ref: textmateColorSettingsSchemaId } } @@ -154,7 +154,7 @@ export class TokenClassificationExtensionPoints { } if (style.fontStyle) { if (typeof style.fontStyle !== 'string' || !style.fontStyle.match(fontStylePattern)) { - collector.error(nls.localize('invalid.fontStyle', "'configuration.{0}.fontStyle' must be a one or a compination of \'italic\', \'bold\' or \'underline\' or the empty string", extensionPoint)); + collector.error(nls.localize('invalid.fontStyle', "'configuration.{0}.fontStyle' must be one or a combination of \'italic\', \'bold\' or \'underline\' or the empty string", extensionPoint)); return undefined; } } @@ -167,11 +167,11 @@ export class TokenClassificationExtensionPoints { const collector = extension.collector; if (!extensionValue || !Array.isArray(extensionValue)) { - collector.error(nls.localize('invalid.tokenTypeConfiguration', "'configuration.tokenType' must be an array")); + collector.error(nls.localize('invalid.semanticTokenTypeConfiguration', "'configuration.semanticTokenType' must be an array")); return; } for (const contribution of extensionValue) { - if (validateTypeOrModifier(contribution, 'tokenType', collector)) { + if (validateTypeOrModifier(contribution, 'semanticTokenType', collector)) { tokenClassificationRegistry.registerTokenType(contribution.id, contribution.description); } } @@ -189,11 +189,11 @@ export class TokenClassificationExtensionPoints { const collector = extension.collector; if (!extensionValue || !Array.isArray(extensionValue)) { - collector.error(nls.localize('invalid.tokenModifierConfiguration', 
"'configuration.tokenModifier' must be an array")); + collector.error(nls.localize('invalid.semanticTokenModifierConfiguration', "'configuration.semanticTokenModifier' must be an array")); return; } for (const contribution of extensionValue) { - if (validateTypeOrModifier(contribution, 'tokenModifier', collector)) { + if (validateTypeOrModifier(contribution, 'semanticTokenModifier', collector)) { tokenClassificationRegistry.registerTokenModifier(contribution.id, contribution.description); } } @@ -211,16 +211,16 @@ export class TokenClassificationExtensionPoints { const collector = extension.collector; if (!extensionValue || !Array.isArray(extensionValue)) { - collector.error(nls.localize('invalid.tokenStyleDefaultConfiguration', "'configuration.tokenStyleDefaults' must be an array")); + collector.error(nls.localize('invalid.semanticTokenStyleDefaultConfiguration', "'configuration.semanticTokenStyleDefaults' must be an array")); return; } for (const contribution of extensionValue) { if (typeof contribution.selector !== 'string' || contribution.selector.length === 0) { - collector.error(nls.localize('invalid.selector', "'configuration.tokenStyleDefaults.selector' must be defined and can not be empty")); + collector.error(nls.localize('invalid.selector', "'configuration.semanticTokenStyleDefaults.selector' must be defined and can not be empty")); continue; } if (!contribution.selector.match(selectorPattern)) { - collector.error(nls.localize('invalid.selector.format', "'configuration.tokenStyleDefaults.selector' must be in the form (type|*)(.modifier)*")); + collector.error(nls.localize('invalid.selector.format', "'configuration.semanticTokenStyleDefaults.selector' must be in the form (type|*)(.modifier)*")); continue; } @@ -228,14 +228,14 @@ export class TokenClassificationExtensionPoints { if (contribution.scopes) { if ((!Array.isArray(contribution.scopes) || contribution.scopes.some(s => typeof s !== 'string'))) { - collector.error(nls.localize('invalid.scopes', "If 
defined, 'configuration.tokenStyleDefaults.scopes' must be an array or strings")); + collector.error(nls.localize('invalid.scopes', "If defined, 'configuration.semanticTokenStyleDefaults.scopes' must be an array of strings")); continue; } tokenStyleDefault.scopesToProbe = [contribution.scopes]; } - tokenStyleDefault.light = validateStyle(contribution.light, 'tokenStyleDefaults.light', collector); - tokenStyleDefault.dark = validateStyle(contribution.dark, 'tokenStyleDefaults.dark', collector); - tokenStyleDefault.hc = validateStyle(contribution.highContrast, 'tokenStyleDefaults.highContrast', collector); + tokenStyleDefault.light = validateStyle(contribution.light, 'semanticTokenStyleDefaults.light', collector); + tokenStyleDefault.dark = validateStyle(contribution.dark, 'semanticTokenStyleDefaults.dark', collector); + tokenStyleDefault.hc = validateStyle(contribution.highContrast, 'semanticTokenStyleDefaults.highContrast', collector); const [type, ...modifiers] = contribution.selector.split('.'); const classification = tokenClassificationRegistry.getTokenClassification(type, modifiers);