routie dev init since i didn't adhere to any proper guidance up until now
This commit is contained in:
+21
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2026 Yosuke Ota
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
+146
@@ -0,0 +1,146 @@
|
||||
# @ota-meshi/ast-token-store
|
||||
|
||||
[![npm version](https://img.shields.io/npm/v/@ota-meshi/ast-token-store.svg)](https://www.npmjs.com/package/@ota-meshi/ast-token-store)
|
||||
[![license](https://img.shields.io/github/license/ota-meshi/ast-token-store.svg)](https://github.com/ota-meshi/ast-token-store/blob/main/LICENSE)
|
||||
|
||||
A class library that provides an API similar to [ESLint's SourceCode#getFirstToken and related methods](https://eslint.org/docs/latest/extend/custom-rules#accessing-the-source-code) for any AST.
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
npm install @ota-meshi/ast-token-store
|
||||
```
|
||||
|
||||
## Requirements
|
||||
|
||||
- Node.js `^20.19.0 || ^22.13.0 || >=24`
|
||||
|
||||
## Usage
|
||||
|
||||
### Basic Example
|
||||
|
||||
The `TokenStore` class provides a way to query tokens from any AST that uses `range: [number, number]` to represent source locations.
|
||||
|
||||
```ts
|
||||
import { TokenStore } from "@ota-meshi/ast-token-store";
|
||||
|
||||
// Define your TokenStore subclass (or use it directly)
|
||||
const store = new TokenStore({
|
||||
// Provide all tokens and comments, sorted or unsorted
|
||||
tokens: [...ast.tokens, ...ast.comments],
|
||||
// A type guard to distinguish comments from regular tokens
|
||||
isComment: (token): token is Comment => token.type === "Comment",
|
||||
});
|
||||
```
|
||||
|
||||
### Constructor
|
||||
|
||||
```ts
|
||||
new TokenStore<Node, Token, Comment>({
|
||||
tokens: (Token | Comment)[],
|
||||
isComment: (token: Token | Comment) => token is Comment,
|
||||
})
|
||||
```
|
||||
|
||||
- **`tokens`** — An array of all tokens and comments.
|
||||
- **`isComment`** — A type guard function that returns `true` if a given token is a comment.
|
||||
|
||||
The class has three generic type parameters:
|
||||
|
||||
| Parameter | Description |
|
||||
| --------- | ------------------------------------------------------- |
|
||||
| `Node` | The AST node type (must have `range: [number, number]`) |
|
||||
| `Token` | The token type (must have `range: [number, number]`) |
|
||||
| `Comment` | The comment type (must have `range: [number, number]`) |
|
||||
|
||||
### Methods
|
||||
|
||||
All methods accept options for filtering, skipping, counting, and including comments.
|
||||
|
||||
#### Single Token Methods
|
||||
|
||||
| Method | Description |
|
||||
| --------------------------------------------- | ------------------------------------------------------ |
|
||||
| `getFirstToken(node, options?)` | Gets the first token of the given node |
|
||||
| `getLastToken(node, options?)` | Gets the last token of the given node |
|
||||
| `getTokenAfter(node, options?)` | Gets the token that follows the given node |
|
||||
| `getTokenBefore(node, options?)` | Gets the token that precedes the given node |
|
||||
| `getFirstTokenBetween(left, right, options?)` | Gets the first token between two non-overlapping nodes |
|
||||
| `getLastTokenBetween(left, right, options?)` | Gets the last token between two non-overlapping nodes |
|
||||
|
||||
#### Multiple Token Methods
|
||||
|
||||
| Method | Description |
|
||||
| ---------------------------------------------- | --------------------------------------------------- |
|
||||
| `getFirstTokens(node, options?)` | Gets the first `count` tokens of the given node |
|
||||
| `getLastTokens(node, options?)` | Gets the last `count` tokens of the given node |
|
||||
| `getTokensAfter(node, options?)` | Gets the `count` tokens that follow the given node |
|
||||
| `getTokensBefore(node, options?)` | Gets the `count` tokens that precede the given node |
|
||||
| `getFirstTokensBetween(left, right, options?)` | Gets the first `count` tokens between two nodes |
|
||||
| `getLastTokensBetween(left, right, options?)` | Gets the last `count` tokens between two nodes |
|
||||
| `getTokens(node, options?)` | Gets all tokens within the given node |
|
||||
| `getTokensBetween(left, right, options?)` | Gets all tokens between two nodes |
|
||||
| `getAllTokens()` | Gets all tokens including comments |
|
||||
|
||||
#### Comment Methods
|
||||
|
||||
| Method | Description |
|
||||
| ----------------------------------- | ------------------------------------------------------------ |
|
||||
| `getAllComments()` | Gets all comment tokens |
|
||||
| `getCommentsInside(nodeOrToken)` | Gets all comment tokens inside the given node/token |
|
||||
| `getCommentsBefore(nodeOrToken)` | Gets all comment tokens directly before the given node/token |
|
||||
| `getCommentsAfter(nodeOrToken)` | Gets all comment tokens directly after the given node/token |
|
||||
| `commentsExistBetween(left, right)` | Checks if any comments exist between two nodes |
|
||||
|
||||
#### Spacing Methods
|
||||
|
||||
| Method | Description |
|
||||
| ----------------------------- | --------------------------------------------------------------- |
|
||||
| `isSpaceBetween(left, right)` | Checks if there is whitespace between two non-overlapping nodes |
|
||||
|
||||
### Options
|
||||
|
||||
Single token methods (`getFirstToken`, `getLastToken`, etc.) accept skip options:
|
||||
|
||||
```ts
|
||||
// Skip N tokens
|
||||
store.getFirstToken(node, { skip: 1 });
|
||||
// Shorthand: pass a number directly
|
||||
store.getFirstToken(node, 1);
|
||||
|
||||
// Filter tokens
|
||||
store.getFirstToken(node, {
|
||||
filter: (token) => token.type === "Punctuator",
|
||||
});
|
||||
|
||||
// Include comments in the search
|
||||
store.getFirstToken(node, { includeComments: true });
|
||||
```
|
||||
|
||||
Multiple token methods (`getFirstTokens`, `getTokensAfter`, etc.) accept count options:
|
||||
|
||||
```ts
|
||||
// Get up to N tokens
|
||||
store.getFirstTokens(node, { count: 3 });
|
||||
// Shorthand: pass a number directly
|
||||
store.getFirstTokens(node, 3);
|
||||
|
||||
// Filter and count
|
||||
store.getTokensAfter(node, {
|
||||
filter: (token) => token.type === "Punctuator",
|
||||
count: 2,
|
||||
});
|
||||
|
||||
// Include comments
|
||||
store.getTokens(node, { includeComments: true });
|
||||
```
|
||||
|
||||
## Attribution
|
||||
|
||||
This library aims for compatibility with ESLint's `SourceCode` token API and its method surface. The method design follows ideas described by ESLint, which is licensed under MIT. The implementation here is original and does not reuse ESLint code.
|
||||
|
||||
ESLint is distributed under the MIT License. See the ESLint repository for details: <https://github.com/eslint/eslint/blob/main/LICENSE>
|
||||
|
||||
## License
|
||||
|
||||
[MIT](./LICENSE)
|
||||
+313
@@ -0,0 +1,313 @@
|
||||
//#region src/types.d.ts
// Structural contract shared by nodes, tokens, and comments: anything
// with a `[start, end]` offset pair into the source text.
type SyntaxElement = {
range: [number, number];
};
// Predicate used to select tokens/comments; may be a plain boolean
// predicate or a type guard that narrows to `R`.
type TokenFilter<E extends SyntaxElement, R extends E = E> = ((tokenOrComment: E) => tokenOrComment is R) | ((tokenOrComment: E) => boolean);
// Skip-style options without a filter; a bare number is shorthand for
// `skip`. Comments are excluded from the search.
type CursorWithSkipOptionsWithoutFilter = number | {
includeComments?: false;
filter?: undefined;
skip?: number;
};
// Skip-style options with a filter; a bare function is shorthand for
// `{ filter }`. Comments are excluded from the search.
type CursorWithSkipOptionsWithFilter<Token extends SyntaxElement, R extends Token = Token> = TokenFilter<Token, R> | {
includeComments?: false;
filter: TokenFilter<Token, R>;
skip?: number;
};
// Skip-style options that also include comments in the search.
type CursorWithSkipOptionsWithComment<Token extends SyntaxElement, Comment extends SyntaxElement, R extends Token | Comment = Token | Comment> = {
includeComments: true;
filter?: TokenFilter<Token | Comment, R>;
skip?: number;
};
// Count-style options without a filter; a bare number is shorthand for
// `count`. Comments are excluded from the search.
type CursorWithCountOptionsWithoutFilter = number | {
includeComments?: false;
filter?: undefined;
count?: number;
};
// Count-style options with a filter; a bare function is shorthand for
// `{ filter }`. Comments are excluded from the search.
type CursorWithCountOptionsWithFilter<Token extends SyntaxElement, R extends Token = Token> = TokenFilter<Token, R> | {
includeComments?: false;
filter: TokenFilter<Token, R>;
count?: number;
};
// Count-style options that also include comments in the search.
type CursorWithCountOptionsWithComment<Token extends SyntaxElement, Comment extends SyntaxElement, R extends Token | Comment = Token | Comment> = {
includeComments: true;
filter?: TokenFilter<Token | Comment, R>;
count?: number;
};
|
||||
//#endregion
|
||||
//#region src/token-store/token-store.d.ts
|
||||
declare const PRIVATE: unique symbol;
|
||||
declare class TokenStore<Node extends SyntaxElement, Token extends SyntaxElement, Comment extends SyntaxElement> {
|
||||
private readonly [PRIVATE];
|
||||
constructor(params: {
|
||||
tokens: (Token | Comment)[];
|
||||
isComment: (token: Token | Comment) => token is Comment;
|
||||
});
|
||||
/**
|
||||
* Gets all tokens, including comments.
|
||||
*/
|
||||
getAllTokens(): (Token | Comment)[];
|
||||
/**
|
||||
* Gets all comments.
|
||||
*/
|
||||
getAllComments(): Comment[];
|
||||
/**
|
||||
* Gets the first token of the given node.
|
||||
*/
|
||||
getFirstToken(node: Node | Token): Token;
|
||||
/**
|
||||
* Gets the first token of the given node with simple options.
|
||||
*/
|
||||
getFirstToken(node: Node | Token | Comment, options?: CursorWithSkipOptionsWithoutFilter): Token | null;
|
||||
/**
|
||||
* Gets the first token of the given node with options.
|
||||
*/
|
||||
getFirstToken<R extends Token>(node: Node | Token | Comment, options: CursorWithSkipOptionsWithFilter<Token, R>): R | null;
|
||||
/**
|
||||
* Gets the first token of the given node with options.
|
||||
*/
|
||||
getFirstToken<R extends Token | Comment>(node: Node | Token | Comment, options: CursorWithSkipOptionsWithComment<Token, Comment, R>): R | null;
|
||||
/**
|
||||
* Gets the first token of the given node with complex options.
|
||||
*/
|
||||
getFirstToken<R extends Token | Comment>(node: Node | Token | Comment, options?: CursorWithSkipOptionsWithoutFilter | CursorWithSkipOptionsWithFilter<Token, R & Token> | CursorWithSkipOptionsWithComment<Token, Comment, R>): R | null;
|
||||
/**
|
||||
* Gets the first tokens of the given node.
|
||||
*/
|
||||
getFirstTokens(node: Node | Token | Comment, options?: CursorWithCountOptionsWithoutFilter): Token[];
|
||||
/**
|
||||
* Gets the first tokens of the given node.
|
||||
*/
|
||||
getFirstTokens<R extends Token>(node: Node | Token | Comment, options: CursorWithCountOptionsWithFilter<Token, R>): R[];
|
||||
/**
|
||||
* Gets the first tokens of the given node with comment options.
|
||||
*/
|
||||
getFirstTokens<R extends Token | Comment>(node: Node | Token | Comment, options: CursorWithCountOptionsWithComment<Token, Comment, R>): R[];
|
||||
/**
|
||||
* Gets the first tokens of the given node with complex options.
|
||||
*/
|
||||
getFirstTokens<R extends Token | Comment>(node: Node | Token | Comment, options?: CursorWithCountOptionsWithoutFilter | CursorWithCountOptionsWithFilter<Token, R & Token> | CursorWithCountOptionsWithComment<Token, Comment, R>): R[];
|
||||
/**
|
||||
* Gets the last token of the given node.
|
||||
*/
|
||||
getLastToken(node: Node | Token): Token;
|
||||
/**
|
||||
* Gets the last token of the given node with options.
|
||||
*/
|
||||
getLastToken(node: Node | Token | Comment, options?: CursorWithSkipOptionsWithoutFilter): Token | null;
|
||||
/**
|
||||
* Gets the last token of the given node with options.
|
||||
*/
|
||||
getLastToken<R extends Token>(node: Node | Token | Comment, options: CursorWithSkipOptionsWithFilter<Token, R>): R | null;
|
||||
/**
|
||||
* Gets the last token of the given node with options.
|
||||
*/
|
||||
getLastToken<R extends Token | Comment>(node: Node | Token | Comment, options: CursorWithSkipOptionsWithComment<Token, Comment, R>): R | null;
|
||||
/**
|
||||
* Gets the last token of the given node with complex options.
|
||||
*/
|
||||
getLastToken<R extends Token | Comment>(node: Node | Token | Comment, options?: CursorWithSkipOptionsWithoutFilter | CursorWithSkipOptionsWithFilter<Token, R & Token> | CursorWithSkipOptionsWithComment<Token, Comment, R>): R | null;
|
||||
/**
|
||||
* Gets the last tokens of the given node.
|
||||
*/
|
||||
getLastTokens(node: Node | Token | Comment, options?: CursorWithCountOptionsWithoutFilter): Token[];
|
||||
/**
|
||||
* Gets the last tokens of the given node.
|
||||
*/
|
||||
getLastTokens<R extends Token>(node: Node | Token | Comment, options: CursorWithCountOptionsWithFilter<Token, R>): R[];
|
||||
/**
|
||||
* Gets the last tokens of the given node with comment options.
|
||||
*/
|
||||
getLastTokens<R extends Token | Comment>(node: Node | Token | Comment, options: CursorWithCountOptionsWithComment<Token, Comment, R>): R[];
|
||||
/**
|
||||
* Gets the last tokens of the given node with complex options.
|
||||
*/
|
||||
getLastTokens<R extends Token | Comment>(node: Node | Token | Comment, options?: CursorWithCountOptionsWithoutFilter | CursorWithCountOptionsWithFilter<Token, R & Token> | CursorWithCountOptionsWithComment<Token, Comment, R>): R[];
|
||||
/**
|
||||
* Gets the token that follows a given node or token.
|
||||
*/
|
||||
getTokenAfter(node: Node | Token | Comment, options?: CursorWithSkipOptionsWithoutFilter): Token | null;
|
||||
/**
|
||||
* Gets the token that follows a given node or token.
|
||||
*/
|
||||
getTokenAfter<R extends Token>(node: Node | Token | Comment, options?: CursorWithSkipOptionsWithFilter<Token, R>): R | null;
|
||||
/**
|
||||
* Gets the token that follows a given node or token with comment options.
|
||||
*/
|
||||
getTokenAfter<R extends Token | Comment>(node: Node | Token | Comment, options: CursorWithSkipOptionsWithComment<Token, Comment, R>): R | null;
|
||||
/**
|
||||
* Gets the token that follows a given node or token with complex options.
|
||||
*/
|
||||
getTokenAfter<R extends Token | Comment>(node: Node | Token | Comment, options?: CursorWithSkipOptionsWithoutFilter | CursorWithSkipOptionsWithFilter<Token, R & Token> | CursorWithSkipOptionsWithComment<Token, Comment, R>): R | null;
|
||||
/**
|
||||
* Gets the `count` tokens that follow a given node or token.
|
||||
*/
|
||||
getTokensAfter(node: Node | Token | Comment, options?: CursorWithCountOptionsWithoutFilter): Token[];
|
||||
/**
|
||||
* Gets the `count` tokens that follow a given node or token.
|
||||
*/
|
||||
getTokensAfter<R extends Token>(node: Node | Token | Comment, options: CursorWithCountOptionsWithFilter<Token, R>): R[];
|
||||
/**
|
||||
* Gets the `count` tokens that follow a given node or token with comment options.
|
||||
*/
|
||||
getTokensAfter<R extends Token | Comment>(node: Node | Token | Comment, options: CursorWithCountOptionsWithComment<Token, Comment, R>): R[];
|
||||
/**
|
||||
* Gets the `count` tokens that follow a given node or token with complex options.
|
||||
*/
|
||||
getTokensAfter<R extends Token | Comment>(node: Node | Token | Comment, options?: CursorWithCountOptionsWithoutFilter | CursorWithCountOptionsWithFilter<Token, R & Token> | CursorWithCountOptionsWithComment<Token, Comment, R>): R[];
|
||||
/**
|
||||
* Gets the token that precedes a given node or token.
|
||||
*/
|
||||
getTokenBefore(node: Node | Token | Comment, options?: CursorWithSkipOptionsWithoutFilter): Token | null;
|
||||
/**
|
||||
* Gets the token that precedes a given node or token.
|
||||
*/
|
||||
getTokenBefore<R extends Token>(node: Node | Token | Comment, options: CursorWithSkipOptionsWithFilter<Token, R>): R | null;
|
||||
/**
|
||||
* Gets the token that precedes a given node or token with comment options.
|
||||
*/
|
||||
getTokenBefore<R extends Token | Comment>(node: Node | Token | Comment, options: CursorWithSkipOptionsWithComment<Token, Comment, R>): R | null;
|
||||
/**
|
||||
* Gets the token that precedes a given node or token with complex options.
|
||||
*/
|
||||
getTokenBefore<R extends Token | Comment>(node: Node | Token | Comment, options?: CursorWithSkipOptionsWithoutFilter | CursorWithSkipOptionsWithFilter<Token, R & Token> | CursorWithSkipOptionsWithComment<Token, Comment, R>): R | null;
|
||||
/**
|
||||
* Gets the `count` tokens that precede a given node or token.
|
||||
*/
|
||||
getTokensBefore(node: Node | Token | Comment, options?: CursorWithCountOptionsWithoutFilter): Token[];
|
||||
/**
|
||||
* Gets the `count` tokens that precede a given node or token.
|
||||
*/
|
||||
getTokensBefore<R extends Token>(node: Node | Token | Comment, options: CursorWithCountOptionsWithFilter<Token, R>): R[];
|
||||
/**
|
||||
* Gets the `count` tokens that precede a given node or token with comment options.
|
||||
*/
|
||||
getTokensBefore<R extends Token | Comment>(node: Node | Token | Comment, options: CursorWithCountOptionsWithComment<Token, Comment, R>): R[];
|
||||
/**
|
||||
* Gets the `count` tokens that precede a given node or token with complex options.
|
||||
*/
|
||||
getTokensBefore<R extends Token | Comment>(node: Node | Token | Comment, options?: CursorWithCountOptionsWithoutFilter | CursorWithCountOptionsWithFilter<Token, R & Token> | CursorWithCountOptionsWithComment<Token, Comment, R>): R[];
|
||||
/**
|
||||
* Gets the first token between two non-overlapping nodes.
|
||||
*/
|
||||
getFirstTokenBetween(left: Node | Token | Comment, right: Node | Token | Comment, options?: CursorWithSkipOptionsWithoutFilter): Token | null;
|
||||
/**
|
||||
* Gets the first token between two non-overlapping nodes.
|
||||
*/
|
||||
getFirstTokenBetween<R extends Token>(left: Node | Token | Comment, right: Node | Token | Comment, options: CursorWithSkipOptionsWithFilter<Token, R>): R | null;
|
||||
/**
|
||||
* Gets the first token between two non-overlapping nodes with comment options.
|
||||
*/
|
||||
getFirstTokenBetween<R extends Token | Comment>(left: Node | Token | Comment, right: Node | Token | Comment, options: CursorWithSkipOptionsWithComment<Token, Comment, R>): R | null;
|
||||
/**
|
||||
* Gets the first token between two non-overlapping nodes with complex options.
|
||||
*/
|
||||
getFirstTokenBetween<R extends Token | Comment>(left: Node | Token | Comment, right: Node | Token | Comment, options?: CursorWithSkipOptionsWithoutFilter | CursorWithSkipOptionsWithFilter<Token, R & Token> | CursorWithSkipOptionsWithComment<Token, Comment, R>): R | null;
|
||||
/**
|
||||
* Gets the first tokens between two non-overlapping nodes.
|
||||
*/
|
||||
getFirstTokensBetween(left: Node | Token | Comment, right: Node | Token | Comment, options?: CursorWithCountOptionsWithoutFilter): Token[];
|
||||
/**
|
||||
* Gets the first tokens between two non-overlapping nodes.
|
||||
*/
|
||||
getFirstTokensBetween<R extends Token>(left: Node | Token | Comment, right: Node | Token | Comment, options: CursorWithCountOptionsWithFilter<Token, R>): R[];
|
||||
/**
|
||||
* Gets the first tokens between two non-overlapping nodes with comment options.
|
||||
*/
|
||||
getFirstTokensBetween<R extends Token | Comment>(left: Node | Token | Comment, right: Node | Token | Comment, options: CursorWithCountOptionsWithComment<Token, Comment, R>): R[];
|
||||
/**
|
||||
* Gets the first tokens between two non-overlapping nodes with complex options.
|
||||
*/
|
||||
getFirstTokensBetween<R extends Token | Comment>(left: Node | Token | Comment, right: Node | Token | Comment, options?: CursorWithCountOptionsWithoutFilter | CursorWithCountOptionsWithFilter<Token, R & Token> | CursorWithCountOptionsWithComment<Token, Comment, R>): R[];
|
||||
/**
|
||||
* Gets the last token between two non-overlapping nodes.
|
||||
*/
|
||||
getLastTokenBetween(left: Node | Token | Comment, right: Node | Token | Comment, options?: CursorWithSkipOptionsWithoutFilter): Token | null;
|
||||
/**
|
||||
* Gets the last token between two non-overlapping nodes.
|
||||
*/
|
||||
getLastTokenBetween<R extends Token>(left: Node | Token | Comment, right: Node | Token | Comment, options: CursorWithSkipOptionsWithFilter<Token, R>): R | null;
|
||||
/**
|
||||
* Gets the last token between two non-overlapping nodes with comment options.
|
||||
*/
|
||||
getLastTokenBetween<R extends Token | Comment>(left: Node | Token | Comment, right: Node | Token | Comment, options: CursorWithSkipOptionsWithComment<Token, Comment, R>): R | null;
|
||||
/**
|
||||
* Gets the last token between two non-overlapping nodes with complex options.
|
||||
*/
|
||||
getLastTokenBetween<R extends Token | Comment>(left: Node | Token | Comment, right: Node | Token | Comment, options?: CursorWithSkipOptionsWithoutFilter | CursorWithSkipOptionsWithFilter<Token, R & Token> | CursorWithSkipOptionsWithComment<Token, Comment, R>): R | null;
|
||||
/**
|
||||
* Gets the last tokens between two non-overlapping nodes.
|
||||
*/
|
||||
getLastTokensBetween(left: Node | Token | Comment, right: Node | Token | Comment, options?: CursorWithCountOptionsWithoutFilter): Token[];
|
||||
/**
|
||||
* Gets the last tokens between two non-overlapping nodes.
|
||||
*/
|
||||
getLastTokensBetween<R extends Token>(left: Node | Token | Comment, right: Node | Token | Comment, options: CursorWithCountOptionsWithFilter<Token, R>): R[];
|
||||
/**
|
||||
* Gets the last tokens between two non-overlapping nodes with comment options.
|
||||
*/
|
||||
getLastTokensBetween<R extends Token | Comment>(left: Node | Token | Comment, right: Node | Token | Comment, options: CursorWithCountOptionsWithComment<Token, Comment, R>): R[];
|
||||
/**
|
||||
* Gets the last tokens between two non-overlapping nodes with complex options.
|
||||
*/
|
||||
getLastTokensBetween<R extends Token | Comment>(left: Node | Token | Comment, right: Node | Token | Comment, options?: CursorWithCountOptionsWithoutFilter | CursorWithCountOptionsWithFilter<Token, R & Token> | CursorWithCountOptionsWithComment<Token, Comment, R>): R[];
|
||||
/**
|
||||
* Gets all tokens that are related to the given node.
|
||||
*/
|
||||
getTokens(node: Node | Token | Comment, options?: CursorWithCountOptionsWithoutFilter): Token[];
|
||||
/**
|
||||
* Gets all tokens that are related to the given node.
|
||||
*/
|
||||
getTokens<R extends Token>(node: Node | Token | Comment, options?: CursorWithCountOptionsWithFilter<Token, R>): R[];
|
||||
/**
|
||||
* Gets all tokens that are related to the given node with comment options.
|
||||
*/
|
||||
getTokens<R extends Token | Comment>(node: Node | Token | Comment, options: CursorWithCountOptionsWithComment<Token, Comment, R>): R[];
|
||||
/**
|
||||
* Gets all tokens that are related to the given node with complex options.
|
||||
*/
|
||||
getTokens<R extends Token | Comment>(node: Node | Token | Comment, options?: CursorWithCountOptionsWithoutFilter | CursorWithCountOptionsWithFilter<Token, R & Token> | CursorWithCountOptionsWithComment<Token, Comment, R>): R[];
|
||||
/**
|
||||
* Gets all of the tokens between two non-overlapping nodes.
|
||||
*/
|
||||
getTokensBetween(left: Node | Token | Comment, right: Node | Token | Comment, options?: CursorWithCountOptionsWithoutFilter): Token[];
|
||||
/**
|
||||
* Gets all of the tokens between two non-overlapping nodes.
|
||||
*/
|
||||
getTokensBetween<R extends Token>(left: Node | Token | Comment, right: Node | Token | Comment, options?: CursorWithCountOptionsWithFilter<Token, R>): R[];
|
||||
/**
|
||||
* Gets all of the tokens between two non-overlapping nodes with comment options.
|
||||
*/
|
||||
getTokensBetween<R extends Token | Comment>(left: Node | Token | Comment, right: Node | Token | Comment, options: CursorWithCountOptionsWithComment<Token, Comment, R>): R[];
|
||||
/**
|
||||
* Gets all of the tokens between two non-overlapping nodes with complex options.
|
||||
*/
|
||||
getTokensBetween<R extends Token | Comment>(left: Node | Token | Comment, right: Node | Token | Comment, paddingOrOptions?: CursorWithCountOptionsWithoutFilter | CursorWithCountOptionsWithFilter<Token, R & Token> | CursorWithCountOptionsWithComment<Token, Comment, R>): R[];
|
||||
/**
|
||||
* Gets all comment tokens inside the given node or token.
|
||||
*/
|
||||
getCommentsInside(nodeOrToken: Node | Token | Comment): Comment[];
|
||||
/**
|
||||
* Gets all comment tokens directly before the given node or token.
|
||||
*/
|
||||
getCommentsBefore(nodeOrToken: Node | Token | Comment): Comment[];
|
||||
/**
|
||||
* Gets all comment tokens directly after the given node or token.
|
||||
*/
|
||||
getCommentsAfter(nodeOrToken: Node | Token | Comment): Comment[];
|
||||
/**
|
||||
* Checks if there are any comment tokens between two non-overlapping nodes.
|
||||
*/
|
||||
commentsExistBetween(left: Node | Token | Comment, right: Node | Token | Comment): boolean;
|
||||
/**
|
||||
* Checks if there is whitespace between two non-overlapping nodes.
|
||||
*/
|
||||
isSpaceBetween(left: Node | Token | Comment, right: Node | Token | Comment): boolean;
|
||||
}
|
||||
//#endregion
|
||||
//#region src/index.d.ts
// Package metadata (name and version) exported by the library entry point.
declare const meta: {
name: string;
version: string;
};
|
||||
//#endregion
|
||||
export { type CursorWithCountOptionsWithComment, type CursorWithCountOptionsWithFilter, type CursorWithCountOptionsWithoutFilter, type CursorWithSkipOptionsWithComment, type CursorWithSkipOptionsWithFilter, type CursorWithSkipOptionsWithoutFilter, type SyntaxElement, type TokenFilter, TokenStore, meta };
|
||||
+465
@@ -0,0 +1,465 @@
|
||||
//#region rolldown:runtime
var __defProp = Object.defineProperty;
/**
 * Builds a module-namespace-like object: each key of `all` becomes an
 * enumerable accessor whose getter is the function stored in `all`.
 * When `symbols` is truthy, tags the object with Symbol.toStringTag.
 */
var __exportAll = (all, symbols) => {
  const target = {};
  // for..in intentionally walks all enumerable keys, matching the
  // original bundler-runtime semantics.
  for (const key in all) {
    __defProp(target, key, { get: all[key], enumerable: true });
  }
  if (symbols) {
    __defProp(target, Symbol.toStringTag, { value: "Module" });
  }
  return target;
};
|
||||
|
||||
//#endregion
|
||||
//#region src/token-store/token-store.ts
|
||||
/**
 * Binary-searches `tokens` (sorted by start offset) for `location`.
 * Returns the index of the token whose start equals `location`, or the
 * insertion point (index of the first token starting after `location`).
 */
function search(tokens, location) {
  let lo = 0;
  let hi = tokens.length - 1;
  while (lo <= hi) {
    const mid = (lo + hi) >> 1;
    const start = tokens[mid].range[0];
    if (start === location) return mid;
    if (start < location) lo = mid + 1;
    else hi = mid - 1;
  }
  return lo;
}
|
||||
/**
 * Resolves the index of the first token whose start offset is at or after
 * `startLoc`, skipping zero-width entries. `indexMap` (start offset ->
 * index) is the fast path; falls back to binary search on a miss.
 */
function getFirstIndex(tokens, indexMap, startLoc) {
  let i = indexMap.get(startLoc) ?? search(tokens, startLoc);
  while (i < tokens.length) {
    const [start, end] = tokens[i].range;
    if (end > start) break; // first real (non-zero-width) token found
    i++;
  }
  return i;
}
|
||||
/**
 * Resolves the index of the last token that ends at or before `endLoc`,
 * skipping zero-width entries. `indexMap` (start offset -> index) is the
 * fast path; falls back to binary search on a miss.
 */
function getLastIndex(tokens, indexMap, endLoc) {
  const mapped = indexMap.get(endLoc);
  let i = mapped != null ? mapped - 1 : search(tokens, endLoc) - 1;
  while (i >= 0) {
    const [start, end] = tokens[i].range;
    if (end > start) break; // last real (non-zero-width) token found
    i--;
  }
  return i;
}
|
||||
/**
 * Normalizes a skip-style options argument into a `{ filter, skip }` pair.
 * Accepts: a number (shorthand for `skip`), a function (shorthand for
 * `filter`), an options object, or nothing. Unless `includeComments` is
 * set, the resulting filter always rejects comments.
 */
function normalizeSkipOptions(options, ctx) {
  if (typeof options === "number") {
    return { filter: ctx.isNotComment, skip: options };
  }
  if (typeof options === "function") {
    return {
      filter: (token) => !ctx.isComment(token) && options(token),
      skip: 0
    };
  }
  const skip = options?.skip ?? 0;
  if (options?.includeComments) {
    return { filter: options.filter ?? (() => true), skip };
  }
  const userFilter = options?.filter;
  if (userFilter) {
    return {
      filter: (token) => !ctx.isComment(token) && userFilter(token),
      skip
    };
  }
  return { filter: ctx.isNotComment, skip };
}
|
||||
/**
 * Normalizes a count-style options argument into a `{ filter, count }`
 * pair. Accepts: a number (shorthand for `count`), a function (shorthand
 * for `filter`), an options object, or nothing. A `count` of 0 means
 * "no limit". Unless `includeComments` is set, the resulting filter
 * always rejects comments.
 */
function normalizeCountOptions(options, ctx) {
  if (typeof options === "number") {
    return { filter: ctx.isNotComment, count: options };
  }
  if (typeof options === "function") {
    return {
      filter: (token) => !ctx.isComment(token) && options(token),
      count: 0
    };
  }
  const count = options?.count ?? 0;
  if (options?.includeComments) {
    return { filter: options.filter ?? (() => true), count };
  }
  const userFilter = options?.filter;
  if (userFilter) {
    return {
      filter: (token) => !ctx.isComment(token) && userFilter(token),
      count
    };
  }
  return { filter: ctx.isNotComment, count };
}
|
||||
// Unique symbol used as the key for TokenStore's hidden internal state.
const PRIVATE = Symbol("private");
|
||||
var TokenStore = class {
  // Internal state, hidden behind the module-private `PRIVATE` key so the
  // public surface exposes only the query methods.
  [PRIVATE];
  /**
   * Builds a token store over a flat token list.
   * @param params - Initialization options.
   * @param params.tokens - Iterable of all tokens and comments; each item
   *   must expose a `range: [start, end]` offset pair.
   * @param params.isComment - Predicate identifying comment tokens.
   */
  constructor(params) {
    // Keep tokens sorted by start offset so every query is an index scan
    // bounded by getFirstIndex/getLastIndex lookups.
    const allTokens = [...params.tokens].sort((a, b) => a.range[0] - b.range[0]);
    const tokenStartToIndex = /* @__PURE__ */ new Map();
    for (let i = 0; i < allTokens.length; i++) {
      const token = allTokens[i];
      // Only index non-empty tokens; a zero-width entry would otherwise
      // shadow a real token that starts at the same offset.
      if (token.range[0] < token.range[1]) tokenStartToIndex.set(token.range[0], i);
    }
    this[PRIVATE] = {
      allTokens,
      tokenStartToIndex,
      ctx: {
        isComment: params.isComment,
        isNotComment: (token) => !params.isComment(token)
      },
      // Lazily computed by getAllComments().
      cacheAllComments: null
    };
  }
  /**
   * Gets all tokens, including comments.
   */
  getAllTokens() {
    return this[PRIVATE].allTokens;
  }
  /**
   * Gets all comments.
   * The result is cached after the first call.
   */
  getAllComments() {
    const { ctx, allTokens, cacheAllComments } = this[PRIVATE];
    if (cacheAllComments) return cacheAllComments;
    const result = [];
    for (const token of allTokens) if (ctx.isComment(token)) result.push(token);
    this[PRIVATE].cacheAllComments = result;
    return result;
  }
  /**
   * Gets the first token of the given node, honoring the normalized
   * `filter`/`skip` options; returns `null` when no token matches.
   */
  getFirstToken(node, options) {
    const { ctx, allTokens, tokenStartToIndex } = this[PRIVATE];
    const { filter, skip } = normalizeSkipOptions(options, ctx);
    const startIndex = getFirstIndex(allTokens, tokenStartToIndex, node.range[0]);
    const endIndex = getLastIndex(allTokens, tokenStartToIndex, node.range[1]);
    let skipped = 0;
    for (let i = startIndex; i <= endIndex && i < allTokens.length; i++) {
      const token = allTokens[i];
      if (filter && !filter(token)) continue;
      if (skipped < skip) {
        skipped++;
        continue;
      }
      return token;
    }
    return null;
  }
  /**
   * Gets the first `count` matching tokens of the given node
   * (all matches when `count` is not positive).
   */
  getFirstTokens(node, options) {
    const { ctx, allTokens, tokenStartToIndex } = this[PRIVATE];
    const { filter, count } = normalizeCountOptions(options, ctx);
    const startIndex = getFirstIndex(allTokens, tokenStartToIndex, node.range[0]);
    const endIndex = getLastIndex(allTokens, tokenStartToIndex, node.range[1]);
    const result = [];
    for (let i = startIndex; i <= endIndex && i < allTokens.length; i++) {
      const token = allTokens[i];
      if (filter && !filter(token)) continue;
      result.push(token);
      if (count > 0 && result.length >= count) break;
    }
    return result;
  }
  /**
   * Gets the last token of the given node, honoring the normalized
   * `filter`/`skip` options; returns `null` when no token matches.
   */
  getLastToken(node, options) {
    const { ctx, allTokens, tokenStartToIndex } = this[PRIVATE];
    const { filter, skip } = normalizeSkipOptions(options, ctx);
    const startIndex = getFirstIndex(allTokens, tokenStartToIndex, node.range[0]);
    const endIndex = getLastIndex(allTokens, tokenStartToIndex, node.range[1]);
    let skipped = 0;
    for (let i = endIndex; i >= startIndex && i >= 0; i--) {
      const token = allTokens[i];
      if (filter && !filter(token)) continue;
      if (skipped < skip) {
        skipped++;
        continue;
      }
      return token;
    }
    return null;
  }
  /**
   * Gets the last `count` matching tokens of the given node, in source
   * order (all matches when `count` is not positive).
   */
  getLastTokens(node, options) {
    const { ctx, allTokens, tokenStartToIndex } = this[PRIVATE];
    const { filter, count } = normalizeCountOptions(options, ctx);
    const startIndex = getFirstIndex(allTokens, tokenStartToIndex, node.range[0]);
    const endIndex = getLastIndex(allTokens, tokenStartToIndex, node.range[1]);
    const result = [];
    // Collect backwards, then restore source order with one reverse()
    // instead of repeated O(n) unshift calls.
    for (let i = endIndex; i >= startIndex && i >= 0; i--) {
      const token = allTokens[i];
      if (filter && !filter(token)) continue;
      result.push(token);
      if (count > 0 && result.length >= count) break;
    }
    return result.reverse();
  }
  /**
   * Gets the token that follows a given node or token.
   */
  getTokenAfter(node, options) {
    const { ctx, allTokens, tokenStartToIndex } = this[PRIVATE];
    const { filter, skip } = normalizeSkipOptions(options, ctx);
    const startIndex = getFirstIndex(allTokens, tokenStartToIndex, node.range[1]);
    let skipped = 0;
    for (let i = startIndex; i < allTokens.length; i++) {
      const token = allTokens[i];
      if (filter && !filter(token)) continue;
      if (skipped < skip) {
        skipped++;
        continue;
      }
      return token;
    }
    return null;
  }
  /**
   * Gets the `count` tokens that follow a given node or token
   * (all matches when `count` is not positive).
   */
  getTokensAfter(node, options) {
    const { ctx, allTokens, tokenStartToIndex } = this[PRIVATE];
    const { filter, count } = normalizeCountOptions(options, ctx);
    const startIndex = getFirstIndex(allTokens, tokenStartToIndex, node.range[1]);
    const result = [];
    for (let i = startIndex; i < allTokens.length; i++) {
      const token = allTokens[i];
      if (filter && !filter(token)) continue;
      result.push(token);
      if (count > 0 && result.length >= count) break;
    }
    return result;
  }
  /**
   * Gets the token that precedes a given node or token.
   */
  getTokenBefore(node, options) {
    const { ctx, allTokens, tokenStartToIndex } = this[PRIVATE];
    const { filter, skip } = normalizeSkipOptions(options, ctx);
    const endIndex = getLastIndex(allTokens, tokenStartToIndex, node.range[0]);
    let skipped = 0;
    for (let i = endIndex; i >= 0; i--) {
      const token = allTokens[i];
      if (filter && !filter(token)) continue;
      if (skipped < skip) {
        skipped++;
        continue;
      }
      return token;
    }
    return null;
  }
  /**
   * Gets the `count` tokens that precedes a given node or token.
   */
  getTokensBefore(node, options) {
    const { ctx, allTokens, tokenStartToIndex } = this[PRIVATE];
    const { filter, count } = normalizeCountOptions(options, ctx);
    const endIndex = getLastIndex(allTokens, tokenStartToIndex, node.range[0]);
    const result = [];
    // Collect backwards, then restore source order with one reverse()
    // instead of repeated O(n) unshift calls.
    for (let i = endIndex; i >= 0; i--) {
      const token = allTokens[i];
      if (filter && !filter(token)) continue;
      result.push(token);
      if (count > 0 && result.length >= count) break;
    }
    return result.reverse();
  }
  /**
   * Gets the first matching token between two non-overlapping nodes;
   * returns `null` when no token matches.
   */
  getFirstTokenBetween(left, right, options) {
    const { ctx, allTokens, tokenStartToIndex } = this[PRIVATE];
    const { filter, skip } = normalizeSkipOptions(options, ctx);
    const startIndex = getFirstIndex(allTokens, tokenStartToIndex, left.range[1]);
    const endIndex = getLastIndex(allTokens, tokenStartToIndex, right.range[0]);
    let skipped = 0;
    for (let i = startIndex; i <= endIndex && i < allTokens.length; i++) {
      const token = allTokens[i];
      if (filter && !filter(token)) continue;
      if (skipped < skip) {
        skipped++;
        continue;
      }
      return token;
    }
    return null;
  }
  /**
   * Gets the first `count` matching tokens between two non-overlapping
   * nodes (all matches when `count` is not positive).
   */
  getFirstTokensBetween(left, right, options) {
    const { ctx, allTokens, tokenStartToIndex } = this[PRIVATE];
    const { filter, count } = normalizeCountOptions(options, ctx);
    const startIndex = getFirstIndex(allTokens, tokenStartToIndex, left.range[1]);
    const endIndex = getLastIndex(allTokens, tokenStartToIndex, right.range[0]);
    const result = [];
    for (let i = startIndex; i <= endIndex && i < allTokens.length; i++) {
      const token = allTokens[i];
      if (filter && !filter(token)) continue;
      result.push(token);
      if (count > 0 && result.length >= count) break;
    }
    return result;
  }
  /**
   * Gets the last matching token between two non-overlapping nodes;
   * returns `null` when no token matches.
   */
  getLastTokenBetween(left, right, options) {
    const { ctx, allTokens, tokenStartToIndex } = this[PRIVATE];
    const { filter, skip } = normalizeSkipOptions(options, ctx);
    const startIndex = getFirstIndex(allTokens, tokenStartToIndex, left.range[1]);
    const endIndex = getLastIndex(allTokens, tokenStartToIndex, right.range[0]);
    let skipped = 0;
    // `i >= 0` guard matches getLastToken(): never index below the array.
    for (let i = endIndex; i >= startIndex && i >= 0; i--) {
      const token = allTokens[i];
      if (filter && !filter(token)) continue;
      if (skipped < skip) {
        skipped++;
        continue;
      }
      return token;
    }
    return null;
  }
  /**
   * Gets the last `count` matching tokens between two non-overlapping
   * nodes, in source order (all matches when `count` is not positive).
   */
  getLastTokensBetween(left, right, options) {
    const { ctx, allTokens, tokenStartToIndex } = this[PRIVATE];
    const { filter, count } = normalizeCountOptions(options, ctx);
    const startIndex = getFirstIndex(allTokens, tokenStartToIndex, left.range[1]);
    const endIndex = getLastIndex(allTokens, tokenStartToIndex, right.range[0]);
    const result = [];
    // Collect backwards with the same `i >= 0` guard as getLastTokens(),
    // then restore source order with one reverse() instead of unshift.
    for (let i = endIndex; i >= startIndex && i >= 0; i--) {
      const token = allTokens[i];
      if (filter && !filter(token)) continue;
      result.push(token);
      if (count > 0 && result.length >= count) break;
    }
    return result.reverse();
  }
  /**
   * Gets all tokens that are related to the given node.
   */
  getTokens(node, options) {
    const { ctx, allTokens, tokenStartToIndex } = this[PRIVATE];
    const { filter, count } = normalizeCountOptions(options, ctx);
    const startIndex = getFirstIndex(allTokens, tokenStartToIndex, node.range[0]);
    const endIndex = getLastIndex(allTokens, tokenStartToIndex, node.range[1]);
    const result = [];
    for (let i = startIndex; i <= endIndex && i < allTokens.length; i++) {
      const token = allTokens[i];
      if (filter && !filter(token)) continue;
      result.push(token);
      if (count > 0 && result.length >= count) break;
    }
    return result;
  }
  /**
   * Gets all of the tokens between two non-overlapping nodes.
   */
  getTokensBetween(left, right, paddingOrOptions) {
    const { ctx, allTokens, tokenStartToIndex } = this[PRIVATE];
    const { filter, count } = normalizeCountOptions(paddingOrOptions, ctx);
    const startIndex = getFirstIndex(allTokens, tokenStartToIndex, left.range[1]);
    const endIndex = getLastIndex(allTokens, tokenStartToIndex, right.range[0]);
    const result = [];
    for (let i = startIndex; i <= endIndex && i < allTokens.length; i++) {
      const token = allTokens[i];
      if (filter && !filter(token)) continue;
      result.push(token);
      if (count > 0 && result.length >= count) break;
    }
    return result;
  }
  /**
   * Gets all comment tokens inside the given node or token.
   */
  getCommentsInside(nodeOrToken) {
    const { ctx, allTokens, tokenStartToIndex } = this[PRIVATE];
    const startIndex = getFirstIndex(allTokens, tokenStartToIndex, nodeOrToken.range[0]);
    const endIndex = getLastIndex(allTokens, tokenStartToIndex, nodeOrToken.range[1]);
    const result = [];
    for (let i = startIndex; i <= endIndex && i < allTokens.length; i++) {
      const token = allTokens[i];
      if (ctx.isComment(token)) result.push(token);
    }
    return result;
  }
  /**
   * Gets all comment tokens directly before the given node or token.
   */
  getCommentsBefore(nodeOrToken) {
    const { ctx, allTokens, tokenStartToIndex } = this[PRIVATE];
    const endIndex = getLastIndex(allTokens, tokenStartToIndex, nodeOrToken.range[0]);
    const result = [];
    // Walk backwards over the contiguous run of comments, stopping at the
    // first non-comment; one reverse() restores source order.
    for (let i = endIndex; i >= 0; i--) {
      const token = allTokens[i];
      if (ctx.isComment(token)) result.push(token);
      else break;
    }
    return result.reverse();
  }
  /**
   * Gets all comment tokens directly after the given node or token.
   */
  getCommentsAfter(nodeOrToken) {
    const { ctx, allTokens, tokenStartToIndex } = this[PRIVATE];
    const startIndex = getFirstIndex(allTokens, tokenStartToIndex, nodeOrToken.range[1]);
    const result = [];
    for (let i = startIndex; i < allTokens.length; i++) {
      const token = allTokens[i];
      if (ctx.isComment(token)) result.push(token);
      else break;
    }
    return result;
  }
  /**
   * Checks if there are any comment tokens between two non-overlapping nodes.
   */
  commentsExistBetween(left, right) {
    const { ctx, allTokens, tokenStartToIndex } = this[PRIVATE];
    const startIndex = getFirstIndex(allTokens, tokenStartToIndex, left.range[1]);
    const endIndex = getLastIndex(allTokens, tokenStartToIndex, right.range[0]);
    for (let i = startIndex; i <= endIndex && i < allTokens.length; i++) {
      const token = allTokens[i];
      if (ctx.isComment(token)) return true;
    }
    return false;
  }
  /**
   * Checks if there is whitespace between two non-overlapping nodes.
   */
  isSpaceBetween(left, right) {
    // Touching or overlapping ranges can never have space between them.
    if (left.range[1] >= right.range[0]) return false;
    const { allTokens, tokenStartToIndex } = this[PRIVATE];
    const startIndex = getFirstIndex(allTokens, tokenStartToIndex, left.range[1]);
    const endIndex = getLastIndex(allTokens, tokenStartToIndex, right.range[0]);
    let prev = left;
    // A gap between any two consecutive ranges in the span means space.
    for (let i = startIndex; i <= endIndex && i < allTokens.length; i++) {
      const token = allTokens[i];
      if (prev.range[1] < token.range[0]) return true;
      prev = token;
    }
    return prev.range[1] < right.range[0];
  }
};
|
||||
|
||||
//#endregion
|
||||
//#region package.json
|
||||
var name$1 = "@ota-meshi/ast-token-store";
|
||||
var version$1 = "0.3.0";
|
||||
|
||||
//#endregion
|
||||
//#region src/meta.ts
|
||||
var meta_exports = /* @__PURE__ */ __exportAll({
|
||||
name: () => name,
|
||||
version: () => version
|
||||
});
|
||||
const name = name$1;
|
||||
const version = version$1;
|
||||
|
||||
//#endregion
|
||||
//#region src/index.ts
|
||||
const meta = { ...meta_exports };
|
||||
|
||||
//#endregion
|
||||
export { TokenStore, meta };
|
||||
+106
@@ -0,0 +1,106 @@
|
||||
{
|
||||
"name": "@ota-meshi/ast-token-store",
|
||||
"version": "0.3.0",
|
||||
"description": "A class library that provides an API similar to ESLint's token store",
|
||||
"type": "module",
|
||||
"exports": {
|
||||
".": {
|
||||
"import": "./lib/index.mjs",
|
||||
"default": "./lib/index.mjs"
|
||||
},
|
||||
"./package.json": "./package.json"
|
||||
},
|
||||
"files": [
|
||||
"lib"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^20.19.0 || ^22.13.0 || >=24"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "npm run build:tsdown",
|
||||
"build:tsdown": "tsdown",
|
||||
"lint": "npm run lint:js && npm run lint:ts",
|
||||
"lint:js": "eslint .",
|
||||
"lint:ts": "npm run tsc",
|
||||
"tsc": "tsc",
|
||||
"eslint-fix": "eslint . --fix",
|
||||
"test": "npm run mocha -- \"tests/src/**/*.ts\" --reporter=dot --timeout=60000",
|
||||
"test:debug": "node --experimental-strip-types --experimental-transform-types ./node_modules/mocha/bin/mocha.js \"tests/src/**/*.ts\" --reporter=dot --timeout=60000",
|
||||
"test:cover": "c8 --reporter=lcov --reporter=text npm run test:debug",
|
||||
"ts": "node --import=tsx",
|
||||
"mocha": "npm run ts -- ./node_modules/mocha/bin/mocha.js",
|
||||
"update": "node --experimental-strip-types --experimental-transform-types ./tools/update.ts",
|
||||
"generate:version": "npm run update && npm run lint -- --fix",
|
||||
"changeset:version": "changeset version && npm i && npm run generate:version && git add --all",
|
||||
"changeset:publish": "npm run build && changeset publish"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/ota-meshi/ast-token-store.git"
|
||||
},
|
||||
"keywords": [
|
||||
"eslint",
|
||||
"token-store"
|
||||
],
|
||||
"author": "Yosuke Ota",
|
||||
"funding": "https://github.com/sponsors/ota-meshi",
|
||||
"license": "MIT",
|
||||
"bugs": {
|
||||
"url": "https://github.com/ota-meshi/ast-token-store/issues"
|
||||
},
|
||||
"homepage": "https://github.com/ota-meshi/ast-token-store/",
|
||||
"devDependencies": {
|
||||
"@changesets/changelog-github": "^0.5.1",
|
||||
"@changesets/cli": "^2.28.1",
|
||||
"@eslint/core": "^1.0.0",
|
||||
"@eslint/markdown": "^7.4.0",
|
||||
"@ota-meshi/eslint-plugin": "^0.20.0",
|
||||
"@shikijs/vitepress-twoslash": "^3.0.0",
|
||||
"@types/eslint": "^9.6.1",
|
||||
"@types/eslint-scope": "^8.0.0",
|
||||
"@types/eslint-utils": "^3.0.5",
|
||||
"@types/estree": "^1.0.6",
|
||||
"@types/json-schema": "^7.0.15",
|
||||
"@types/mdast": "^4.0.4",
|
||||
"@types/mocha": "^10.0.10",
|
||||
"@types/node": "^24.0.0",
|
||||
"@types/semver": "^7.5.8",
|
||||
"assert": "^2.1.0",
|
||||
"c8": "^10.1.3",
|
||||
"eslint": "^10.0.1",
|
||||
"eslint-compat-utils": "^0.6.4",
|
||||
"eslint-config-prettier": "^10.1.1",
|
||||
"eslint-plugin-eslint-comments": "^3.2.0",
|
||||
"eslint-plugin-eslint-plugin": "^7.0.0",
|
||||
"eslint-plugin-jsdoc": "^62.7.0",
|
||||
"eslint-plugin-json-schema-validator": "^6.0.0",
|
||||
"eslint-plugin-jsonc": "^2.19.1",
|
||||
"eslint-plugin-markdown": "^5.1.0",
|
||||
"eslint-plugin-markdown-links": "^0.7.0",
|
||||
"eslint-plugin-markdown-preferences": "^0.40.2",
|
||||
"eslint-plugin-n": "^17.16.2",
|
||||
"eslint-plugin-node-dependencies": "^1.0.0",
|
||||
"eslint-plugin-prettier": "^5.2.3",
|
||||
"eslint-plugin-regexp": "^3.0.0",
|
||||
"eslint-plugin-vue": "^10.0.0",
|
||||
"eslint-plugin-yml": "^3.0.0",
|
||||
"eslint-snapshot-rule-tester": "^0.1.0",
|
||||
"eslint-typegen": "^2.0.0",
|
||||
"espree": "^11.0.0",
|
||||
"events": "^3.3.0",
|
||||
"globals": "^17.0.0",
|
||||
"mocha": "^11.1.0",
|
||||
"prettier": "^3.5.3",
|
||||
"semver": "^7.7.1",
|
||||
"toml-eslint-parser": "^1.0.3",
|
||||
"tsdown": "^0.19.0",
|
||||
"tsx": "^4.19.3",
|
||||
"typescript": "~5.9.0",
|
||||
"typescript-eslint": "^8.56.0",
|
||||
"vitepress": "^1.6.3",
|
||||
"vue-eslint-parser": "^10.0.0"
|
||||
},
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user