routie dev init since i didn't adhere to any proper guidance up until now
This commit is contained in:
+244
@@ -0,0 +1,244 @@
|
||||
# v1.4.1
|
||||
|
||||
## 1.4.6
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- 02d3c46: Support default values on non-optional names (e.g. `@property {number} BITMASK_VALUE_A=16 - description`)
|
||||
|
||||
## 1.4.5
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- fix: skip empty lines in name tokenizer only when type is present
|
||||
|
||||
## 1.4.4
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- fully exclude slash-containing strings like @npm/package from tag detection @brettz9
|
||||
|
||||
## 1.4.3
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Add prepare and prepublishOnly scripts to ensure build runs before publishing
|
||||
|
||||
## 1.4.2
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- npm audit
|
||||
- ensure type, name and description on subsequent lines are properly parsed. @brettz9
|
||||
- ensure type, name and description on subsequent lines are properly parsed. @brettz9
|
||||
- fix .prettierignore
|
||||
- add source transformation example
|
||||
|
||||
# v1.4.0
|
||||
|
||||
- ESM compatibility improvements; fixes #159, #161
|
||||
|
||||
# v1.3.1
|
||||
|
||||
- allow for valid empty jsdoc; fixes #128
|
||||
|
||||
# v1.3.0
|
||||
|
||||
- add support for custom block markers
|
||||
|
||||
# v1.2.4
|
||||
|
||||
- reverting engine constraint back to ^12.0.0
|
||||
|
||||
# v1.2.3
|
||||
|
||||
- publishing missing fix: point package's main to .cjs file
|
||||
|
||||
# v1.2.2
|
||||
|
||||
- re-export ./util on the top-level for compatibility with older Node
|
||||
- point package's main to .cjs file
|
||||
|
||||
# v1.2.1
|
||||
|
||||
- bump `engines` per `exports` issues in earlier Node versions
|
||||
|
||||
# v1.2.0
|
||||
|
||||
- keep and handle appropriately CR line endings
|
||||
|
||||
# v1.1.6-beta.3
|
||||
|
||||
- process CRs as a separate .lineEnd token
|
||||
|
||||
# v1.1.6-beta.2
|
||||
|
||||
- ESM/CJS compatibility fixes
|
||||
|
||||
# v1.1.6-beta.1
|
||||
|
||||
- support native ESM
|
||||
|
||||
# v1.1.6-beta.0
|
||||
|
||||
- keep original CR line endings
|
||||
- allow to normalize line endings with `crlf` transform
|
||||
|
||||
# v1.1.5
|
||||
|
||||
- drop unused variables
|
||||
- add .editorconfig
|
||||
|
||||
# v1.1.4
|
||||
|
||||
- `bugfix` fix unsynced lib/
|
||||
|
||||
# v1.1.3
|
||||
|
||||
- export primitive type on the top level: Markers, Block, Spec, Line, Tokens, Problem
|
||||
|
||||
# v1.1.2
|
||||
|
||||
- `bugfix` Allow to build nested tags from `name.subname` even if `name` wasn't d
|
||||
- `bugfix` Preserve indentation when extracting comments
|
||||
|
||||
# v1.1.1
|
||||
|
||||
- add helpers for rewiring Spec.source <-> Spec.tags.source
|
||||
|
||||
# v1.1.0
|
||||
|
||||
- split tokenizers into separate modules
|
||||
- allow multiline {type} definitions - issue #109
|
||||
- allow using "=>" in [name=default] defaults – issue #112
|
||||
- allow using "=" in quoted [name=default] defaults – issue #112
|
||||
- add tokenizers usage example - issue #111
|
||||
|
||||
# v1.0.0
|
||||
|
||||
- complete rewrite in TS with more flexible API
|
||||
|
||||
# v0.7.6
|
||||
|
||||
- distinct non-critical errors by providing `err.warning`
|
||||
|
||||
# v0.7.5
|
||||
|
||||
- name parsing fixes
|
||||
|
||||
# v0.7.4
|
||||
|
||||
- node 8 backward compatibility fixes
|
||||
|
||||
# v0.7.3
|
||||
|
||||
- make stringify result more close to the source
|
||||
|
||||
# v0.7.2
|
||||
|
||||
- make stringify to start each line with \* in multiline comments
|
||||
|
||||
# v0.7.1
|
||||
|
||||
- ensure non-space characters after asterisk are included in source
|
||||
|
||||
# v0.7.0
|
||||
|
||||
- allow fenced blocks in tag description, see opts.fence
|
||||
|
||||
# v0.6.2
|
||||
|
||||
- document TypeScript definitions
|
||||
|
||||
# v0.6.1
|
||||
|
||||
- adjust stringifier indentation
|
||||
|
||||
# v0.6.0
|
||||
|
||||
- soft-drop node@6 support
|
||||
- migrate to ES6 syntax
|
||||
- allow to generate comments out of parsed data
|
||||
|
||||
# v0.5.5
|
||||
|
||||
- allow loose tag names, e.g. @.tag, @-tag
|
||||
|
||||
# v0.5.4
|
||||
|
||||
- allow quoted literal names, e.g. `@tag "My Var" description`
|
||||
|
||||
# v0.5.3
|
||||
|
||||
- corrected TypeScript definitions
|
||||
|
||||
# v0.5.2
|
||||
|
||||
- added TypeScript definitions
|
||||
- removed `readable-stream` dependency
|
||||
|
||||
# v0.5.1
|
||||
|
||||
- Support for tab as separator between tag components.
|
||||
- Docs: Indicate when `optional` is `true`; `default` property
|
||||
|
||||
# v0.5.0
|
||||
|
||||
- line wrapping control with `opts.join`
|
||||
|
||||
# v0.4.2
|
||||
|
||||
- tolerate inconsistent lines alignment within block
|
||||
|
||||
# v0.4.1
|
||||
|
||||
- refactored parsing, allow to not start lines with "\* " inside block
|
||||
|
||||
# v0.3.2
|
||||
|
||||
- fix RegExp for `description` extraction to allow $ char
|
||||
|
||||
# v0.3.1
|
||||
|
||||
- use `readable-stream` for Node 0.8 compatibility
|
||||
- allow to pass optional parameters to `parse.file(path [,opts], done)`
|
||||
- allow `parse.stream` to work with Buffers in addition to strings
|
||||
|
||||
# v0.3.0
|
||||
|
||||
- `feature` allow to use custom parsers
|
||||
- `feature` always include source, no `raw_value` option needed
|
||||
- `bugfix` always provide `optional` tag property
|
||||
- `refactor` clean up tests
|
||||
|
||||
# v0.2.3
|
||||
|
||||
- `bugfix` Accept `/** one line */` comments
|
||||
- `refactor` Get rid of `lodash` to avoid unnecessary extra size when bundled
|
||||
|
||||
# v0.2.2
|
||||
|
||||
- `feature` allow spaces in default values `@my-tag {my.type} [name=John Doe]`
|
||||
|
||||
# v0.2.1
|
||||
|
||||
- `refactor` make line parsing mechanism more tolerant
|
||||
|
||||
# v0.2.0
|
||||
|
||||
- `feature` include source line numbers in parsed data
|
||||
- `feature` optionally prevent dotted names expanding
|
||||
|
||||
# v0.1.2
|
||||
|
||||
- `bugfix` Allow to build nested tags from `name.subname` even if `name` wasn't d
|
||||
- `bugfix` Preserve indentation when extracting comments
|
||||
|
||||
# v0.1.1
|
||||
|
||||
- `improvement` `parse(source)` returns array of all blocks found in source or an empty array
|
||||
- `bugfix` fixed indented blocks parsing
|
||||
|
||||
# v0.1.0
|
||||
|
||||
Initial implementation
|
||||
+120
@@ -0,0 +1,120 @@
|
||||
# CLAUDE.md
|
||||
|
||||
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
|
||||
|
||||
## Commands
|
||||
|
||||
```bash
|
||||
npm run build # Compile TypeScript → es6/, lib/ (CommonJS), browser/
|
||||
npm test # Prettier check + Jest (also runs build first via pretest)
|
||||
npm run format # Auto-fix formatting with Prettier
|
||||
```
|
||||
|
||||
Run a single test file:
|
||||
```bash
|
||||
npm test -- tests/unit/parser.spec.ts
|
||||
```
|
||||
|
||||
Run tests matching a pattern:
|
||||
```bash
|
||||
npm test -- --testNamePattern "block with tags"
|
||||
```
|
||||
|
||||
## Architecture
|
||||
|
||||
`comment-parser` is a zero-dependency JSDoc comment parser. It converts `/** */` comment strings into structured data and back (bidirectional).
|
||||
|
||||
### Parse pipeline
|
||||
|
||||
```
|
||||
raw source string
|
||||
→ source-parser (splits into Line[] with Tokens per line)
|
||||
→ block-parser (groups lines into description block + tag sections)
|
||||
→ spec-parser (runs tokenizers on each tag section → Spec[])
|
||||
→ Block[]
|
||||
```
|
||||
|
||||
Each stage is independently configurable. The public `parse(source, options)` function wires them together.
|
||||
|
||||
### Key data types (`src/primitives.ts`)
|
||||
|
||||
- **`Block`** — one `/** */` comment; has `description`, `tags` (Spec[]), `source` (Line[]), `problems`
|
||||
- **`Spec`** — one `@tag`; has `tag`, `type`, `name`, `optional`, `default`, `description`, `source`
|
||||
- **`Line`** — a raw source line with its `tokens`
|
||||
- **`Tokens`** — fine-grained breakdown of a line: `start`, `delimiter`, `postDelimiter`, `tag`, `postTag`, `type`, `postType`, `name`, `postName`, `description`, `end`, `lineEnd`
|
||||
|
||||
### Module layout
|
||||
|
||||
| Directory | Responsibility |
|
||||
|-----------|---------------|
|
||||
| `src/parser/` | `index.ts` (factory), `source-parser.ts`, `block-parser.ts`, `spec-parser.ts` |
|
||||
| `src/parser/tokenizers/` | `tag.ts`, `type.ts`, `name.ts`, `description.ts` — each extracts one Tokens field |
|
||||
| `src/stringifier/` | Converts Block/Tokens back to source string; `inspect.ts` for debugging |
|
||||
| `src/transforms/` | Post-parse transformations: `align`, `indent`, `crlf`, composed via `flow` |
|
||||
| `src/util.ts` | `seedBlock`, `seedTokens`, `rewireSpecs`, `rewireSource` helpers |
|
||||
|
||||
### Customization points
|
||||
|
||||
Tokenizers and the source-line parser are injected via options, so callers can override any parsing step. Transforms are pure functions composed with `transforms.flow(...)`.
|
||||
|
||||
### Build outputs
|
||||
|
||||
- `es6/` — ES modules (primary dev target, what `exports["."]` points to for ESM consumers)
|
||||
- `lib/` — CommonJS (`.cjs` extensions, generated by `convert-extension`)
|
||||
- `browser/` — IIFE bundle via Rollup
|
||||
|
||||
## Playground
|
||||
|
||||
The live demo at `https://syavorsky.github.io/comment-parser` is hosted in a separate public repo: `syavorsky/syavorsky.github.io`, under the `comment-parser/` directory. It is not auto-deployed from this repo — the bundled library is updated manually.
|
||||
|
||||
To update the playground to a new version, in the `syavorsky/syavorsky.github.io` repo:
|
||||
|
||||
```bash
|
||||
cd syavorsky.github.io
|
||||
./comment-parser/upgrade.sh <version> # e.g. ./comment-parser/upgrade.sh 1.4.5
|
||||
```
|
||||
|
||||
This script:
|
||||
1. Installs the specified version of `comment-parser` from npm
|
||||
2. Copies `browser/index.js` and `tests/e2e/examples.js` into `comment-parser/lib/`
|
||||
3. Updates `comment-parser/lib/VERSION`
|
||||
4. Updates the version label in `comment-parser/index.html`
|
||||
5. Commits `package.json`, `package-lock.json`, and `comment-parser/`
|
||||
|
||||
## Branching
|
||||
|
||||
Always work on a dedicated branch, never directly on `main`:
|
||||
|
||||
- **GitHub issue** → `issue-186/non-optional-defaults`
|
||||
- **Bug without an issue** → `bug/short-description`
|
||||
- **Feature without an issue** → `feature/short-description`
|
||||
|
||||
## Versioning & releases
|
||||
|
||||
PRs **must** include a changeset file or carry the `skip-changeset` label (enforced by CI).
|
||||
|
||||
`npm run release:add` is interactive and can't be automated. Instead, create the changeset file directly:
|
||||
|
||||
```bash
|
||||
# .changeset/<random-name>.md
|
||||
---
|
||||
'comment-parser': patch
|
||||
---
|
||||
|
||||
Description of the fix or change.
|
||||
```
|
||||
|
||||
Use `patch` for bug fixes, `minor` for new features, `major` for breaking changes. The filename can be anything unique (e.g. use a random word combo).
|
||||
|
||||
Add the changeset file as a separate commit alongside the code changes.
|
||||
|
||||
### Full release flow
|
||||
|
||||
```bash
|
||||
npm run release:version # bumps version, updates CHANGELOG, commits
|
||||
npm run release:publish # publishes to npm, pushes tags
|
||||
```
|
||||
|
||||
After publishing, update the playground (see above).
|
||||
|
||||
CI tests against Node 20, 22, and 24.
|
||||
+21
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014 Sergii Iavorskyi
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
+220
@@ -0,0 +1,220 @@
|
||||
# comment-parser
|
||||
|
||||
`comment-parser` is a library helping to handle Generic JSDoc-style comments. It is
|
||||
|
||||
- **language-agnostic** – no semantics enforced. You decide what tags are and what they mean. And it can be used with any language supporting `/** */` source comments.
|
||||
- **no dependencies** – it is compact and environment-agnostic, can be run on both the server and browser sides
|
||||
- **highly customizable** – with a little code you can deeply customize how comments are parsed
|
||||
- **bidirectional** - you can write comment blocks back to the source after updating or formatting
|
||||
- **strictly typed** - comes with generated `d.ts` data definitions since written in TypeScript
|
||||
|
||||
```sh
|
||||
npm install comment-parser
|
||||
```
|
||||
|
||||
> 💡 Check out the [Playground](https://syavorsky.github.io/comment-parser)
|
||||
|
||||
> 💡 Previous version lives in [0.x](https://github.com/syavorsky/comment-parser/tree/0.x) branch
|
||||
|
||||
Lib mainly provides two pieces [Parser](#Parser) and [Stringifier](#Stringifier).
|
||||
|
||||
## Parser
|
||||
|
||||
Let's go over string parsing:
|
||||
|
||||
```js
|
||||
const { parse } = require('comment-parser/lib')
|
||||
|
||||
const source = `
|
||||
/**
|
||||
* Description may go
|
||||
* over few lines followed by @tags
|
||||
* @param {string} name the name parameter
|
||||
* @param {any} value the value of any type
|
||||
*/`
|
||||
|
||||
const parsed = parse(source)
|
||||
```
|
||||
|
||||
Lib source code is written in TypeScript and all data shapes are conveniently available for your IDE of choice. All types described below can be found in [primitives.ts](src/primitives.ts)
|
||||
|
||||
The input source is first parsed into lines, then lines split into tokens, and finally, tokens are processed into blocks of tags
|
||||
|
||||
### Block
|
||||
|
||||
```js
|
||||
/**
|
||||
* Description may go
|
||||
* over multiple lines followed by @tags
|
||||
* @param {string} name the name parameter
|
||||
* @param {any} value the value parameter
|
||||
*/
|
||||
```
|
||||
|
||||
### Description
|
||||
|
||||
```js
|
||||
/**
|
||||
* Description may go
|
||||
* over multiple lines followed by @tags
|
||||
```
|
||||
|
||||
### Tags
|
||||
|
||||
```js
|
||||
* @param {string} name the name parameter
|
||||
```
|
||||
|
||||
```js
|
||||
* @param {any} value the value parameter
|
||||
*/
|
||||
```
|
||||
|
||||
### Tokens
|
||||
|
||||
```
|
||||
|line|start|delimiter|postDelimiter|tag |postTag|name |postName|type |postType|description |end|
|
||||
|----|-----|---------|-------------|------|-------|-----|--------|--------|--------|--------------------------------|---|
|
||||
| 0|{2} |/** | | | | | | | | | |
|
||||
| 1|{3} |* |{1} | | | | | | |Description may go | |
|
||||
| 2|{3} |* |{1} | | | | | | |over few lines followed by @tags| |
|
||||
| 3|{3} |* |{1} |@param|{1} |name |{1} |{string}|{1} |the name parameter | |
|
||||
| 4|{3} |* |{1} |@param|{1} |value|{1} |{any} |{1} |the value of any type | |
|
||||
| 5|{3} | | | | | | | | | |*/ |
|
||||
```
|
||||
|
||||
### Result
|
||||
|
||||
The result is an array of Block objects, see the full output on the [playground](https://syavorsky.github.io/comment-parser)
|
||||
|
||||
```js
|
||||
[{
|
||||
  // upper text of the comment, overall block description
|
||||
description: 'Description may go over multiple lines followed by @tags',
|
||||
// list of block tags: @param, @param
|
||||
tags: [{
|
||||
// tokens.tag without "@"
|
||||
tag: 'param',
|
||||
// unwrapped tokens.name
|
||||
name: 'name',
|
||||
// unwrapped tokens.type
|
||||
type: 'string',
|
||||
// true, if tokens.name is [optional]
|
||||
optional: false,
|
||||
// default value if optional [name=default] has one
|
||||
default: undefined,
|
||||
    // tokens.description assembled from a single or multiple lines
|
||||
description: 'the name parameter',
|
||||
    // problems occurred while parsing this tag section, subset of ../problems array
|
||||
problems: [],
|
||||
// source lines processed for extracting this tag, "slice" of the ../source item reference
|
||||
source: [ ... ],
|
||||
}, ... ],
|
||||
// source is an array of `Line` items having the source
|
||||
// line number and `Tokens` that can be assembled back into
|
||||
// the line string preserving original formatting
|
||||
source: [{
|
||||
// source line number
|
||||
number: 1,
|
||||
// source line string
|
||||
source: "/**",
|
||||
// source line tokens
|
||||
tokens: {
|
||||
// indentation
|
||||
start: "",
|
||||
// delimiter, either '/**', '*/', '*', or ''. Mid lines may have no delimiters
|
||||
delimiter: "/**",
|
||||
// space between delimiter and tag
|
||||
postDelimiter: "",
|
||||
// tag starting with "@"
|
||||
tag: "",
|
||||
// space between tag and type
|
||||
postTag: "",
|
||||
      // name with no whitespaces or "multiple words" wrapped into quotes. May occur in [name] and [name=default] forms
|
||||
name: "",
|
||||
// space between name and type
|
||||
postName: "",
|
||||
      // type has to be {wrapped} into curlies, otherwise it will be omitted
|
||||
type: "",
|
||||
// space between type and description
|
||||
postType: "",
|
||||
      // description is basically the rest of the line
|
||||
description: "",
|
||||
// closing */ marker if present
|
||||
end: ""
|
||||
}
|
||||
}, ... ],
|
||||
  // problems occurred while parsing the block
|
||||
problems: [],
|
||||
}];
|
||||
```
|
||||
|
||||
While `.source[].tokens` are not providing readable annotation information, they are essential for tracing data origins and assembling string blocks with `stringify`
|
||||
|
||||
### options
|
||||
|
||||
```ts
|
||||
interface Options {
|
||||
// start count for source line numbers
|
||||
startLine: number;
|
||||
// escaping chars sequence marking wrapped content literal for the parser
|
||||
fence: string;
|
||||
// block and comment description compaction strategy
|
||||
spacing: 'compact' | 'preserve';
|
||||
// tokenizer functions extracting name, type, and description out of tag, see Tokenizer
|
||||
tokenizers: Tokenizer[];
|
||||
}
|
||||
```
|
||||
|
||||
examples
|
||||
- [default config](https://syavorsky.github.io/comment-parser/#parse-defaults)
|
||||
- [line numbers control](https://syavorsky.github.io/comment-parser/#parse-line-numbering)
|
||||
- [description spacing](https://syavorsky.github.io/comment-parser/#parse-spacing)
|
||||
- [escaping](https://syavorsky.github.io/comment-parser/#parse-escaping)
|
||||
- [explore the origin source](https://syavorsky.github.io/comment-parser/#parse-source-exploration)
|
||||
|
||||
[suggest more examples](https://github.com/syavorsky/comment-parser/issues/new?title=example+suggestion%3A+...&labels=example,parser)
|
||||
|
||||
## Stringifier
|
||||
|
||||
The stringifier is an important piece used by other tools updating the source code. It goes over `Block.source[].tokens` items and assembles them back to the string. It might be used with various transforms applied before stringifying.
|
||||
|
||||
```js
|
||||
const { parse, stringify, transforms: {flow, align, indent} } = require('comment-parser');
|
||||
|
||||
const source = `
|
||||
/**
|
||||
* Description may go
|
||||
* over multiple lines followed by @tags
|
||||
*
|
||||
* @my-tag {my.type} my-name description line 1
|
||||
description line 2
|
||||
* description line 3
|
||||
*/`;
|
||||
|
||||
const parsed = parse(source);
|
||||
const transform = flow(align(), indent(0))
|
||||
console.log(stringify(transform(parsed[0])));
|
||||
```
|
||||
|
||||
### Result
|
||||
|
||||
```js
|
||||
/**
|
||||
* Description may go
|
||||
* over multiple lines followed by @tags
|
||||
*
|
||||
* @my-tag {my.type} my-name description line 1
|
||||
description line 2
|
||||
* description line 3
|
||||
*/
|
||||
```
|
||||
|
||||
examples
|
||||
- [format comments](https://syavorsky.github.io/comment-parser/#stringify-formatting)
|
||||
|
||||
[suggest more examples](https://github.com/syavorsky/comment-parser/issues/new?title=example+suggestion%3A+...&labels=example,stringifier)
|
||||
|
||||
## Migrating from 0.x version
|
||||
|
||||
Code of pre-1.0 version is forked into [0.x](https://github.com/syavorsky/comment-parser/tree/0.x) and will phase out eventually. Please file the issue if you find some previously existing functionality can't be achieved with 1.x API. Check out [migration notes](migrate-1.0.md).
|
||||
+710
@@ -0,0 +1,710 @@
|
||||
var CommentParser = (function (exports) {
|
||||
'use strict';
|
||||
|
||||
/** @deprecated */
exports.Markers = void 0;
// Comment delimiter markers recognized by the source parser:
// start "/**" opens a block, nostart "/***" is explicitly NOT a block opener,
// delim "*" prefixes mid-block lines, end "*/" closes a block.
(function (Markers) {
    Markers["start"] = "/**";
    Markers["nostart"] = "/***";
    Markers["delim"] = "*";
    Markers["end"] = "*/";
})(exports.Markers || (exports.Markers = {}));
|
||||
|
||||
/** Reports whether `source` is non-empty and made up entirely of whitespace. */
function isSpace(source) {
    const onlyWhitespace = /^\s+$/;
    return onlyWhitespace.test(source);
}
|
||||
/**
 * Splits a trailing run of CR characters off `source`.
 * Returns a [carriageReturns, rest] pair; carriageReturns is '' when absent.
 */
function splitCR(source) {
    const match = /\r+$/.exec(source);
    if (match === null) return ['', source];
    const crCount = match[0].length;
    return [source.slice(-crCount), source.slice(0, -crCount)];
}
|
||||
/**
 * Splits leading whitespace off `source`.
 * Returns an [indent, rest] pair; indent is '' when the line has none.
 */
function splitSpace(source) {
    const match = /^\s+/.exec(source);
    if (match === null) return ['', source];
    const width = match[0].length;
    return [source.slice(0, width), source.slice(width)];
}
|
||||
/** Splits `source` on LF only; CRs are left in place for splitCR to handle. */
function splitLines(source) {
    return source.split('\n');
}
|
||||
/** Builds a Block, filling any field not supplied in `block` with an empty default. */
function seedBlock(block = {}) {
    const defaults = { description: '', tags: [], source: [], problems: [] };
    return { ...defaults, ...block };
}
|
||||
/** Builds a Spec, filling any field not supplied in `spec` with an empty default. */
function seedSpec(spec = {}) {
    const defaults = {
        tag: '',
        name: '',
        type: '',
        optional: false,
        description: '',
        problems: [],
        source: [],
    };
    return { ...defaults, ...spec };
}
|
||||
/** Builds a Tokens record, filling any field not supplied in `tokens` with ''. */
function seedTokens(tokens = {}) {
    const defaults = {
        start: '',
        delimiter: '',
        postDelimiter: '',
        tag: '',
        postTag: '',
        name: '',
        postName: '',
        type: '',
        postType: '',
        description: '',
        end: '',
        lineEnd: '',
    };
    return { ...defaults, ...tokens };
}
|
||||
/**
 * Makes Block.tags[].source entries reference the Block.source items with the
 * same line number, using Block.source as the source of truth.
 * This is the counterpart of rewireSpecs.
 * @param block parsed comments block
 */
function rewireSource(block) {
    const byNumber = new Map();
    for (const line of block.source) {
        byNumber.set(line.number, line);
    }
    for (const spec of block.tags) {
        spec.source = spec.source.map((line) => byNumber.get(line.number));
    }
    return block;
}
|
||||
/**
 * Makes Block.source entries reference the Block.tags[].source items with the
 * same line number, using the tags' lines as the source of truth.
 * This is the counterpart of rewireSource.
 * @param block parsed comments block
 */
function rewireSpecs(block) {
    const byNumber = new Map();
    for (const spec of block.tags) {
        for (const line of spec.source) {
            byNumber.set(line.number, line);
        }
    }
    // Lines not claimed by any tag are kept as-is.
    block.source = block.source.map((line) => byNumber.get(line.number) || line);
    return block;
}
|
||||
|
||||
// A tag starts with "@" followed by non-space, non-slash characters,
// then a space or end of line (so "@npm/package" is not a tag).
const reTag = /^@[^\s/]+(?=\s|$)/;
/**
 * Creates a configured block parser: groups a block's lines into a leading
 * description section followed by one section per @tag line.
 * @param {Partial<Options>} options
 */
function getParser$3({ fence = '```', } = {}) {
    const fencer = getFencer(fence);
    return function parseBlock(source) {
        // sections[0] collects the description; each @tag opens a new section.
        const sections = [[]];
        let insideFence = false;
        for (const line of source) {
            const text = line.tokens.description;
            // Tags inside a fenced region are treated as plain text.
            if (!insideFence && reTag.test(text)) {
                sections.push([line]);
            }
            else {
                sections[sections.length - 1].push(line);
            }
            if (fencer(text)) insideFence = !insideFence;
        }
        return sections;
    };
}
/** Turns a string fence into a matcher; a custom function fence is used as-is. */
function getFencer(fence) {
    if (typeof fence === 'string')
        // Odd number of fence markers on the line toggles the fenced state.
        return (source) => source.split(fence).length % 2 === 0;
    return fence;
}
|
||||
|
||||
/**
 * Creates a stateful source-line parser. The returned function is fed one raw
 * source line per call; it returns null while no block is open (or the open
 * block is not finished yet), and the accumulated Line[] once the closing
 * marker is seen.
 */
function getParser$2({ startLine = 0, markers = exports.Markers, } = {}) {
    // Lines collected for the block currently being read; null while outside a block.
    let block = null;
    // Running line number, seeded from options.startLine.
    let num = startLine;
    return function parseSource(source) {
        let rest = source;
        const tokens = seedTokens();
        // Peel trailing CRs first, then leading indentation.
        [tokens.lineEnd, rest] = splitCR(rest);
        [tokens.start, rest] = splitSpace(rest);
        // Open a new block on the start marker ("/**") but not on nostart ("/***").
        if (block === null &&
            rest.startsWith(markers.start) &&
            !rest.startsWith(markers.nostart)) {
            block = [];
            tokens.delimiter = rest.slice(0, markers.start.length);
            rest = rest.slice(markers.start.length);
            [tokens.postDelimiter, rest] = splitSpace(rest);
        }
        // Not inside a comment block: count the line and report nothing.
        if (block === null) {
            num++;
            return null;
        }
        const isClosed = rest.trimRight().endsWith(markers.end);
        // Mid-block "*" delimiter — but not the "*/" terminator itself.
        if (tokens.delimiter === '' &&
            rest.startsWith(markers.delim) &&
            !rest.startsWith(markers.end)) {
            tokens.delimiter = markers.delim;
            rest = rest.slice(markers.delim.length);
            [tokens.postDelimiter, rest] = splitSpace(rest);
        }
        if (isClosed) {
            const trimmed = rest.trimRight();
            // tokens.end keeps "*/" plus any whitespace that trailed it.
            tokens.end = rest.slice(trimmed.length - markers.end.length);
            rest = trimmed.slice(0, -markers.end.length);
        }
        tokens.description = rest;
        block.push({ number: num, source, tokens });
        num++;
        // Block finished: hand back a copy and reset for the next block.
        if (isClosed) {
            const result = block.slice();
            block = null;
            return result;
        }
        return null;
    };
}
|
||||
|
||||
/**
 * Creates a spec parser: runs each configured tokenizer over the tag section,
 * stopping early once a tokenizer records a critical problem.
 */
function getParser$1({ tokenizers }) {
    return function parseSpec(source) {
        let spec = seedSpec({ source });
        for (const tokenize of tokenizers) {
            spec = tokenize(spec);
            const lastProblem = spec.problems[spec.problems.length - 1];
            if (lastProblem != null && lastProblem.critical) break;
        }
        return spec;
    };
}
|
||||
|
||||
/**
 * Splits the `@prefix` from remaining `Spec.lines[].token.description` into the `tag` token,
 * and populates `spec.tag`
 */
function tagTokenizer() {
    const reTagPrefix = /\s*(@(\S+))(\s*)/;
    return (spec) => {
        const firstLine = spec.source[0];
        const { tokens } = firstLine;
        const match = reTagPrefix.exec(tokens.description);
        if (match === null) {
            spec.problems.push({
                code: 'spec:tag:prefix',
                message: 'tag should start with "@" symbol',
                line: firstLine.number,
                critical: true,
            });
            return spec;
        }
        // Slash-containing names like @npm/package are not tags; leave untouched.
        if (match[1].includes('/')) {
            return spec;
        }
        tokens.tag = match[1];
        tokens.postTag = match[3];
        tokens.description = tokens.description.slice(match[0].length);
        spec.tag = match[2];
        return spec;
    };
}
|
||||
|
||||
/**
 * Splits the remaining `Spec.lines[].tokens.description` into `type` and `description`
 * tokens and populates `Spec.type`. The type must begin with "{" on the first
 * non-empty content line and may span multiple lines until its curlies balance.
 *
 * @param {Spacing} spacing tells how to deal with a whitespace
 * for type values going over multiple lines
 */
function typeTokenizer(spacing = 'compact') {
    const join = getJoiner$1(spacing);
    return (spec) => {
        let curlies = 0; // depth of currently unmatched "{"
        let lines = []; // [tokens, typeChunk] pairs, one per line the type spans
        let descriptionBegun = false;
        let firstTypeIteration = true;
        for (const { tokens } of spec.source.values()) {
            let type = '';
            // Skip leading blank lines until the tag's content begins.
            if (!descriptionBegun && tokens.description.trim()) {
                descriptionBegun = true;
            }
            else if (!descriptionBegun) {
                continue;
            }
            // No "{" at the start of the content means there is no type at all.
            if (firstTypeIteration && tokens.description[0] !== '{')
                return spec;
            firstTypeIteration = false;
            // Consume characters until the curly depth returns to zero.
            for (const ch of tokens.description) {
                if (ch === '{')
                    curlies++;
                if (ch === '}')
                    curlies--;
                type += ch;
                if (curlies === 0)
                    break;
            }
            lines.push([tokens, type]);
            if (curlies === 0)
                break;
        }
        if (!descriptionBegun) {
            return spec;
        }
        // Ran out of lines with a "{" still open — record a critical problem.
        if (curlies !== 0) {
            spec.problems.push({
                code: 'spec:type:unpaired-curlies',
                message: 'unpaired curlies',
                line: spec.source[0].number,
                critical: true,
            });
            return spec;
        }
        const parts = [];
        // Continuation lines keep only the first line's post-delimiter width;
        // any surplus indentation is folded into that line's type token.
        const offset = lines[0][0].postDelimiter.length;
        for (const [i, [tokens, type]] of lines.entries()) {
            tokens.type = type;
            if (i > 0) {
                tokens.type = tokens.postDelimiter.slice(offset) + type;
                tokens.postDelimiter = tokens.postDelimiter.slice(0, offset);
            }
            [tokens.postType, tokens.description] = splitSpace(tokens.description.slice(type.length));
            parts.push(tokens.type);
        }
        // Strip the outer "{" and "}" before joining the per-line parts.
        parts[0] = parts[0].slice(1);
        parts[parts.length - 1] = parts[parts.length - 1].slice(0, -1);
        spec.type = join(parts);
        return spec;
    };
}
|
||||
const trim = (x) => x.trim();
/**
 * Picks the joiner used to assemble a multi-line {type} back into one string:
 * 'compact' trims and concatenates, 'preserve' keeps line breaks, and a
 * custom function is returned unchanged.
 */
function getJoiner$1(spacing) {
    switch (spacing) {
        case 'compact':
            return (parts) => parts.map(trim).join('');
        case 'preserve':
            return (parts) => parts.join('\n');
        default:
            return spacing;
    }
}
|
||||
|
||||
// A "quoted" value is fully wrapped in double quotes (lets "=" appear inside
// quoted [name=default] defaults). Falsy input short-circuits, returning the
// input itself rather than false — callers only use the result's truthiness.
const isQuoted = (s) => s && s.startsWith('"') && s.endsWith('"');
|
||||
/**
|
||||
* Splits remaining `spec.lines[].tokens.description` into `name` and `descriptions` tokens,
|
||||
* and populates the `spec.name`
|
||||
*/
|
||||
function nameTokenizer() {
|
||||
const typeEnd = (num, { tokens }, i) => tokens.type === '' ? num : i;
|
||||
return (spec) => {
|
||||
// look for the name starting in the line where {type} ends
|
||||
let finalTypeLine = spec.source.reduce(typeEnd, 0);
|
||||
let tokens;
|
||||
if (spec.type) {
|
||||
do {
|
||||
({ tokens } = spec.source[finalTypeLine]);
|
||||
if (tokens.description.trim()) {
|
||||
break;
|
||||
}
|
||||
finalTypeLine++;
|
||||
} while (spec.source[finalTypeLine]);
|
||||
}
|
||||
else {
|
||||
({ tokens } = spec.source[finalTypeLine]);
|
||||
}
|
||||
const source = tokens.description.trimStart();
|
||||
const quotedGroups = source.split('"');
|
||||
// if it starts with quoted group, assume it is a literal
|
||||
if (quotedGroups.length > 1 &&
|
||||
quotedGroups[0] === '' &&
|
||||
quotedGroups.length % 2 === 1) {
|
||||
spec.name = quotedGroups[1];
|
||||
tokens.name = `"${quotedGroups[1]}"`;
|
||||
[tokens.postName, tokens.description] = splitSpace(source.slice(tokens.name.length));
|
||||
return spec;
|
||||
}
|
||||
let brackets = 0;
|
||||
let name = '';
|
||||
let optional = false;
|
||||
let defaultValue;
|
||||
// assume name is non-space string or anything wrapped into brackets
|
||||
for (const ch of source) {
|
||||
if (brackets === 0 && isSpace(ch))
|
||||
break;
|
||||
if (ch === '[')
|
||||
brackets++;
|
||||
if (ch === ']')
|
||||
brackets--;
|
||||
name += ch;
|
||||
}
|
||||
if (brackets !== 0) {
|
||||
spec.problems.push({
|
||||
code: 'spec:name:unpaired-brackets',
|
||||
message: 'unpaired brackets',
|
||||
line: spec.source[0].number,
|
||||
critical: true,
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
const nameToken = name;
|
||||
if (name[0] === '[' && name[name.length - 1] === ']') {
|
||||
optional = true;
|
||||
name = name.slice(1, -1);
|
||||
const parts = name.split('=');
|
||||
name = parts[0].trim();
|
||||
if (parts[1] !== undefined)
|
||||
defaultValue = parts.slice(1).join('=').trim();
|
||||
if (name === '') {
|
||||
spec.problems.push({
|
||||
code: 'spec:name:empty-name',
|
||||
message: 'empty name',
|
||||
line: spec.source[0].number,
|
||||
critical: true,
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
if (defaultValue === '') {
|
||||
spec.problems.push({
|
||||
code: 'spec:name:empty-default',
|
||||
message: 'empty default value',
|
||||
line: spec.source[0].number,
|
||||
critical: true,
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
// has "=" and is not a string, except for "=>"
|
||||
if (!isQuoted(defaultValue) && /=(?!>)/.test(defaultValue)) {
|
||||
spec.problems.push({
|
||||
code: 'spec:name:invalid-default',
|
||||
message: 'invalid default value syntax',
|
||||
line: spec.source[0].number,
|
||||
critical: true,
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
}
|
||||
if (!optional) {
|
||||
const eqIndex = name.search(/=(?!>)/);
|
||||
if (eqIndex !== -1) {
|
||||
defaultValue = name.slice(eqIndex + 1).trim();
|
||||
name = name.slice(0, eqIndex).trim();
|
||||
if (name === '') {
|
||||
spec.problems.push({
|
||||
code: 'spec:name:empty-name',
|
||||
message: 'empty name',
|
||||
line: spec.source[0].number,
|
||||
critical: true,
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
if (defaultValue === '') {
|
||||
spec.problems.push({
|
||||
code: 'spec:name:empty-default',
|
||||
message: 'empty default value',
|
||||
line: spec.source[0].number,
|
||||
critical: true,
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
if (!isQuoted(defaultValue) && /=(?!>)/.test(defaultValue)) {
|
||||
spec.problems.push({
|
||||
code: 'spec:name:invalid-default',
|
||||
message: 'invalid default value syntax',
|
||||
line: spec.source[0].number,
|
||||
critical: true,
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
}
|
||||
}
|
||||
spec.optional = optional;
|
||||
spec.name = name;
|
||||
tokens.name = nameToken;
|
||||
if (defaultValue !== undefined)
|
||||
spec.default = defaultValue;
|
||||
[tokens.postName, tokens.description] = splitSpace(source.slice(tokens.name.length));
|
||||
return spec;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Makes no changes to `spec.lines[].tokens` but joins them into `spec.description`
|
||||
* following given spacing srtategy
|
||||
* @param {Spacing} spacing tells how to handle the whitespace
|
||||
* @param {BlockMarkers} markers tells how to handle comment block delimitation
|
||||
*/
|
||||
function descriptionTokenizer(spacing = 'compact', markers = exports.Markers) {
|
||||
const join = getJoiner(spacing);
|
||||
return (spec) => {
|
||||
spec.description = join(spec.source, markers);
|
||||
return spec;
|
||||
};
|
||||
}
|
||||
function getJoiner(spacing) {
|
||||
if (spacing === 'compact')
|
||||
return compactJoiner;
|
||||
if (spacing === 'preserve')
|
||||
return preserveJoiner;
|
||||
return spacing;
|
||||
}
|
||||
function compactJoiner(lines, markers = exports.Markers) {
|
||||
return lines
|
||||
.map(({ tokens: { description } }) => description.trim())
|
||||
.filter((description) => description !== '')
|
||||
.join(' ');
|
||||
}
|
||||
const lineNo = (num, { tokens }, i) => tokens.type === '' ? num : i;
|
||||
const getDescription = ({ tokens }) => (tokens.delimiter === '' ? tokens.start : tokens.postDelimiter.slice(1)) +
|
||||
tokens.description;
|
||||
function preserveJoiner(lines, markers = exports.Markers) {
|
||||
if (lines.length === 0)
|
||||
return '';
|
||||
// skip the opening line with no description
|
||||
if (lines[0].tokens.description === '' &&
|
||||
lines[0].tokens.delimiter === markers.start)
|
||||
lines = lines.slice(1);
|
||||
// skip the closing line with no description
|
||||
const lastLine = lines[lines.length - 1];
|
||||
if (lastLine !== undefined &&
|
||||
lastLine.tokens.description === '' &&
|
||||
lastLine.tokens.end.endsWith(markers.end))
|
||||
lines = lines.slice(0, -1);
|
||||
// description starts at the last line of type definition
|
||||
lines = lines.slice(lines.reduce(lineNo, 0));
|
||||
return lines.map(getDescription).join('\n');
|
||||
}
|
||||
|
||||
function getParser({ startLine = 0, fence = '```', spacing = 'compact', markers = exports.Markers, tokenizers = [
|
||||
tagTokenizer(),
|
||||
typeTokenizer(spacing),
|
||||
nameTokenizer(),
|
||||
descriptionTokenizer(spacing),
|
||||
], } = {}) {
|
||||
if (startLine < 0 || startLine % 1 > 0)
|
||||
throw new Error('Invalid startLine');
|
||||
const parseSource = getParser$2({ startLine, markers });
|
||||
const parseBlock = getParser$3({ fence });
|
||||
const parseSpec = getParser$1({ tokenizers });
|
||||
const joinDescription = getJoiner(spacing);
|
||||
return function (source) {
|
||||
const blocks = [];
|
||||
for (const line of splitLines(source)) {
|
||||
const lines = parseSource(line);
|
||||
if (lines === null)
|
||||
continue;
|
||||
const sections = parseBlock(lines);
|
||||
const specs = sections.slice(1).map(parseSpec);
|
||||
blocks.push({
|
||||
description: joinDescription(sections[0], markers),
|
||||
tags: specs,
|
||||
source: lines,
|
||||
problems: specs.reduce((acc, spec) => acc.concat(spec.problems), []),
|
||||
});
|
||||
}
|
||||
return blocks;
|
||||
};
|
||||
}
|
||||
|
||||
function join(tokens) {
|
||||
return (tokens.start +
|
||||
tokens.delimiter +
|
||||
tokens.postDelimiter +
|
||||
tokens.tag +
|
||||
tokens.postTag +
|
||||
tokens.type +
|
||||
tokens.postType +
|
||||
tokens.name +
|
||||
tokens.postName +
|
||||
tokens.description +
|
||||
tokens.end +
|
||||
tokens.lineEnd);
|
||||
}
|
||||
function getStringifier() {
|
||||
return (block) => block.source.map(({ tokens }) => join(tokens)).join('\n');
|
||||
}
|
||||
|
||||
var __rest$2 = (window && window.__rest) || function (s, e) {
|
||||
var t = {};
|
||||
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
|
||||
t[p] = s[p];
|
||||
if (s != null && typeof Object.getOwnPropertySymbols === "function")
|
||||
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
|
||||
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
|
||||
t[p[i]] = s[p[i]];
|
||||
}
|
||||
return t;
|
||||
};
|
||||
const zeroWidth$1 = {
|
||||
start: 0,
|
||||
tag: 0,
|
||||
type: 0,
|
||||
name: 0,
|
||||
};
|
||||
const getWidth = (markers = exports.Markers) => (w, { tokens: t }) => ({
|
||||
start: t.delimiter === markers.start ? t.start.length : w.start,
|
||||
tag: Math.max(w.tag, t.tag.length),
|
||||
type: Math.max(w.type, t.type.length),
|
||||
name: Math.max(w.name, t.name.length),
|
||||
});
|
||||
const space = (len) => ''.padStart(len, ' ');
|
||||
function align$1(markers = exports.Markers) {
|
||||
let intoTags = false;
|
||||
let w;
|
||||
function update(line) {
|
||||
const tokens = Object.assign({}, line.tokens);
|
||||
if (tokens.tag !== '')
|
||||
intoTags = true;
|
||||
const isEmpty = tokens.tag === '' &&
|
||||
tokens.name === '' &&
|
||||
tokens.type === '' &&
|
||||
tokens.description === '';
|
||||
// dangling '*/'
|
||||
if (tokens.end === markers.end && isEmpty) {
|
||||
tokens.start = space(w.start + 1);
|
||||
return Object.assign(Object.assign({}, line), { tokens });
|
||||
}
|
||||
switch (tokens.delimiter) {
|
||||
case markers.start:
|
||||
tokens.start = space(w.start);
|
||||
break;
|
||||
case markers.delim:
|
||||
tokens.start = space(w.start + 1);
|
||||
break;
|
||||
default:
|
||||
tokens.delimiter = '';
|
||||
tokens.start = space(w.start + 2); // compensate delimiter
|
||||
}
|
||||
if (!intoTags) {
|
||||
tokens.postDelimiter = tokens.description === '' ? '' : ' ';
|
||||
return Object.assign(Object.assign({}, line), { tokens });
|
||||
}
|
||||
const nothingAfter = {
|
||||
delim: false,
|
||||
tag: false,
|
||||
type: false,
|
||||
name: false,
|
||||
};
|
||||
if (tokens.description === '') {
|
||||
nothingAfter.name = true;
|
||||
tokens.postName = '';
|
||||
if (tokens.name === '') {
|
||||
nothingAfter.type = true;
|
||||
tokens.postType = '';
|
||||
if (tokens.type === '') {
|
||||
nothingAfter.tag = true;
|
||||
tokens.postTag = '';
|
||||
if (tokens.tag === '') {
|
||||
nothingAfter.delim = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
tokens.postDelimiter = nothingAfter.delim ? '' : ' ';
|
||||
if (!nothingAfter.tag)
|
||||
tokens.postTag = space(w.tag - tokens.tag.length + 1);
|
||||
if (!nothingAfter.type)
|
||||
tokens.postType = space(w.type - tokens.type.length + 1);
|
||||
if (!nothingAfter.name)
|
||||
tokens.postName = space(w.name - tokens.name.length + 1);
|
||||
return Object.assign(Object.assign({}, line), { tokens });
|
||||
}
|
||||
return (_a) => {
|
||||
var { source } = _a, fields = __rest$2(_a, ["source"]);
|
||||
w = source.reduce(getWidth(markers), Object.assign({}, zeroWidth$1));
|
||||
return rewireSource(Object.assign(Object.assign({}, fields), { source: source.map(update) }));
|
||||
};
|
||||
}
|
||||
|
||||
var __rest$1 = (window && window.__rest) || function (s, e) {
|
||||
var t = {};
|
||||
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
|
||||
t[p] = s[p];
|
||||
if (s != null && typeof Object.getOwnPropertySymbols === "function")
|
||||
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
|
||||
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
|
||||
t[p[i]] = s[p[i]];
|
||||
}
|
||||
return t;
|
||||
};
|
||||
const pull = (offset) => (str) => str.slice(offset);
|
||||
const push = (offset) => {
|
||||
const space = ''.padStart(offset, ' ');
|
||||
return (str) => str + space;
|
||||
};
|
||||
function indent(pos) {
|
||||
let shift;
|
||||
const pad = (start) => {
|
||||
if (shift === undefined) {
|
||||
const offset = pos - start.length;
|
||||
shift = offset > 0 ? push(offset) : pull(-offset);
|
||||
}
|
||||
return shift(start);
|
||||
};
|
||||
const update = (line) => (Object.assign(Object.assign({}, line), { tokens: Object.assign(Object.assign({}, line.tokens), { start: pad(line.tokens.start) }) }));
|
||||
return (_a) => {
|
||||
var { source } = _a, fields = __rest$1(_a, ["source"]);
|
||||
return rewireSource(Object.assign(Object.assign({}, fields), { source: source.map(update) }));
|
||||
};
|
||||
}
|
||||
|
||||
var __rest = (window && window.__rest) || function (s, e) {
|
||||
var t = {};
|
||||
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
|
||||
t[p] = s[p];
|
||||
if (s != null && typeof Object.getOwnPropertySymbols === "function")
|
||||
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
|
||||
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
|
||||
t[p[i]] = s[p[i]];
|
||||
}
|
||||
return t;
|
||||
};
|
||||
function crlf(ending) {
|
||||
function update(line) {
|
||||
return Object.assign(Object.assign({}, line), { tokens: Object.assign(Object.assign({}, line.tokens), { lineEnd: ending === 'LF' ? '' : '\r' }) });
|
||||
}
|
||||
return (_a) => {
|
||||
var { source } = _a, fields = __rest(_a, ["source"]);
|
||||
return rewireSource(Object.assign(Object.assign({}, fields), { source: source.map(update) }));
|
||||
};
|
||||
}
|
||||
|
||||
function flow(...transforms) {
|
||||
return (block) => transforms.reduce((block, t) => t(block), block);
|
||||
}
|
||||
|
||||
const zeroWidth = {
|
||||
line: 0,
|
||||
start: 0,
|
||||
delimiter: 0,
|
||||
postDelimiter: 0,
|
||||
tag: 0,
|
||||
postTag: 0,
|
||||
name: 0,
|
||||
postName: 0,
|
||||
type: 0,
|
||||
postType: 0,
|
||||
description: 0,
|
||||
end: 0,
|
||||
lineEnd: 0,
|
||||
};
|
||||
const headers = { lineEnd: 'CR' };
|
||||
const fields = Object.keys(zeroWidth);
|
||||
const repr = (x) => (isSpace(x) ? `{${x.length}}` : x);
|
||||
const frame = (line) => '|' + line.join('|') + '|';
|
||||
const align = (width, tokens) => Object.keys(tokens).map((k) => repr(tokens[k]).padEnd(width[k]));
|
||||
function inspect({ source }) {
|
||||
var _a, _b;
|
||||
if (source.length === 0)
|
||||
return '';
|
||||
const width = Object.assign({}, zeroWidth);
|
||||
for (const f of fields)
|
||||
width[f] = ((_a = headers[f]) !== null && _a !== void 0 ? _a : f).length;
|
||||
for (const { number, tokens } of source) {
|
||||
width.line = Math.max(width.line, number.toString().length);
|
||||
for (const k in tokens)
|
||||
width[k] = Math.max(width[k], repr(tokens[k]).length);
|
||||
}
|
||||
const lines = [[], []];
|
||||
for (const f of fields)
|
||||
lines[0].push(((_b = headers[f]) !== null && _b !== void 0 ? _b : f).padEnd(width[f]));
|
||||
for (const f of fields)
|
||||
lines[1].push('-'.padEnd(width[f], '-'));
|
||||
for (const { number, tokens } of source) {
|
||||
const line = number.toString().padStart(width.line);
|
||||
lines.push([line, ...align(width, tokens)]);
|
||||
}
|
||||
return lines.map(frame).join('\n');
|
||||
}
|
||||
|
||||
function parse(source, options = {}) {
|
||||
return getParser(options)(source);
|
||||
}
|
||||
const stringify = getStringifier();
|
||||
const transforms = {
|
||||
flow: flow,
|
||||
align: align$1,
|
||||
indent: indent,
|
||||
crlf: crlf,
|
||||
};
|
||||
const tokenizers = {
|
||||
tag: tagTokenizer,
|
||||
type: typeTokenizer,
|
||||
name: nameTokenizer,
|
||||
description: descriptionTokenizer,
|
||||
};
|
||||
const util = { rewireSpecs, rewireSource, seedBlock, seedTokens };
|
||||
|
||||
exports.inspect = inspect;
|
||||
exports.parse = parse;
|
||||
exports.stringify = stringify;
|
||||
exports.tokenizers = tokenizers;
|
||||
exports.transforms = transforms;
|
||||
exports.util = util;
|
||||
|
||||
return exports;
|
||||
|
||||
})({});
|
||||
+32
@@ -0,0 +1,32 @@
|
||||
import { Options as ParserOptions } from './parser/index.js';
|
||||
import descriptionTokenizer from './parser/tokenizers/description.js';
|
||||
import nameTokenizer from './parser/tokenizers/name.js';
|
||||
import tagTokenizer from './parser/tokenizers/tag.js';
|
||||
import typeTokenizer from './parser/tokenizers/type.js';
|
||||
import alignTransform from './transforms/align.js';
|
||||
import indentTransform from './transforms/indent.js';
|
||||
import crlfTransform from './transforms/crlf.js';
|
||||
import { flow as flowTransform } from './transforms/index.js';
|
||||
import { rewireSpecs, rewireSource, seedBlock, seedTokens } from './util.js';
|
||||
export * from './primitives.js';
|
||||
export declare function parse(source: string, options?: Partial<ParserOptions>): import("./primitives.js").Block[];
|
||||
export declare const stringify: import("./stringifier/index.js").Stringifier;
|
||||
export { default as inspect } from './stringifier/inspect.js';
|
||||
export declare const transforms: {
|
||||
flow: typeof flowTransform;
|
||||
align: typeof alignTransform;
|
||||
indent: typeof indentTransform;
|
||||
crlf: typeof crlfTransform;
|
||||
};
|
||||
export declare const tokenizers: {
|
||||
tag: typeof tagTokenizer;
|
||||
type: typeof typeTokenizer;
|
||||
name: typeof nameTokenizer;
|
||||
description: typeof descriptionTokenizer;
|
||||
};
|
||||
export declare const util: {
|
||||
rewireSpecs: typeof rewireSpecs;
|
||||
rewireSource: typeof rewireSource;
|
||||
seedBlock: typeof seedBlock;
|
||||
seedTokens: typeof seedTokens;
|
||||
};
|
||||
+30
@@ -0,0 +1,30 @@
|
||||
import getParser from './parser/index.js';
|
||||
import descriptionTokenizer from './parser/tokenizers/description.js';
|
||||
import nameTokenizer from './parser/tokenizers/name.js';
|
||||
import tagTokenizer from './parser/tokenizers/tag.js';
|
||||
import typeTokenizer from './parser/tokenizers/type.js';
|
||||
import getStringifier from './stringifier/index.js';
|
||||
import alignTransform from './transforms/align.js';
|
||||
import indentTransform from './transforms/indent.js';
|
||||
import crlfTransform from './transforms/crlf.js';
|
||||
import { flow as flowTransform } from './transforms/index.js';
|
||||
import { rewireSpecs, rewireSource, seedBlock, seedTokens } from './util.js';
|
||||
export * from './primitives.js';
|
||||
export function parse(source, options = {}) {
|
||||
return getParser(options)(source);
|
||||
}
|
||||
export const stringify = getStringifier();
|
||||
export { default as inspect } from './stringifier/inspect.js';
|
||||
export const transforms = {
|
||||
flow: flowTransform,
|
||||
align: alignTransform,
|
||||
indent: indentTransform,
|
||||
crlf: crlfTransform,
|
||||
};
|
||||
export const tokenizers = {
|
||||
tag: tagTokenizer,
|
||||
type: typeTokenizer,
|
||||
name: nameTokenizer,
|
||||
description: descriptionTokenizer,
|
||||
};
|
||||
export const util = { rewireSpecs, rewireSource, seedBlock, seedTokens };
|
||||
+24
@@ -0,0 +1,24 @@
|
||||
import { Line } from '../primitives.js';
|
||||
/**
|
||||
* Groups source lines in sections representing tags.
|
||||
* First section is a block description if present. Last section captures lines starting with
|
||||
* the last tag to the end of the block, including dangling closing marker.
|
||||
* @param {Line[]} block source lines making a single comment block
|
||||
*/
|
||||
export type Parser = (block: Line[]) => Line[][];
|
||||
/**
|
||||
* Predicate telling if string contains opening/closing escaping sequence
|
||||
* @param {string} source raw source line
|
||||
*/
|
||||
export type Fencer = (source: string) => boolean;
|
||||
/**
|
||||
* `Parser` configuration options
|
||||
*/
|
||||
export interface Options {
|
||||
fence: string | Fencer;
|
||||
}
|
||||
/**
|
||||
* Creates configured `Parser`
|
||||
* @param {Partial<Options>} options
|
||||
*/
|
||||
export default function getParser({ fence, }?: Partial<Options>): Parser;
|
||||
+29
@@ -0,0 +1,29 @@
|
||||
const reTag = /^@[^\s/]+(?=\s|$)/;
|
||||
/**
|
||||
* Creates configured `Parser`
|
||||
* @param {Partial<Options>} options
|
||||
*/
|
||||
export default function getParser({ fence = '```', } = {}) {
|
||||
const fencer = getFencer(fence);
|
||||
const toggleFence = (source, isFenced) => fencer(source) ? !isFenced : isFenced;
|
||||
return function parseBlock(source) {
|
||||
// start with description section
|
||||
const sections = [[]];
|
||||
let isFenced = false;
|
||||
for (const line of source) {
|
||||
if (reTag.test(line.tokens.description) && !isFenced) {
|
||||
sections.push([line]);
|
||||
}
|
||||
else {
|
||||
sections[sections.length - 1].push(line);
|
||||
}
|
||||
isFenced = toggleFence(line.tokens.description, isFenced);
|
||||
}
|
||||
return sections;
|
||||
};
|
||||
}
|
||||
function getFencer(fence) {
|
||||
if (typeof fence === 'string')
|
||||
return (source) => source.split(fence).length % 2 === 0;
|
||||
return fence;
|
||||
}
|
||||
+11
@@ -0,0 +1,11 @@
|
||||
import { Block, BlockMarkers } from '../primitives.js';
|
||||
import { Tokenizer } from './tokenizers/index.js';
|
||||
export interface Options {
|
||||
startLine: number;
|
||||
fence: string;
|
||||
spacing: 'compact' | 'preserve';
|
||||
markers: BlockMarkers;
|
||||
tokenizers: Tokenizer[];
|
||||
}
|
||||
export type Parser = (source: string) => Block[];
|
||||
export default function getParser({ startLine, fence, spacing, markers, tokenizers, }?: Partial<Options>): Parser;
|
||||
+39
@@ -0,0 +1,39 @@
|
||||
import { Markers } from '../primitives.js';
|
||||
import { splitLines } from '../util.js';
|
||||
import blockParser from './block-parser.js';
|
||||
import sourceParser from './source-parser.js';
|
||||
import specParser from './spec-parser.js';
|
||||
import tokenizeTag from './tokenizers/tag.js';
|
||||
import tokenizeType from './tokenizers/type.js';
|
||||
import tokenizeName from './tokenizers/name.js';
|
||||
import tokenizeDescription, { getJoiner as getDescriptionJoiner, } from './tokenizers/description.js';
|
||||
export default function getParser({ startLine = 0, fence = '```', spacing = 'compact', markers = Markers, tokenizers = [
|
||||
tokenizeTag(),
|
||||
tokenizeType(spacing),
|
||||
tokenizeName(),
|
||||
tokenizeDescription(spacing),
|
||||
], } = {}) {
|
||||
if (startLine < 0 || startLine % 1 > 0)
|
||||
throw new Error('Invalid startLine');
|
||||
const parseSource = sourceParser({ startLine, markers });
|
||||
const parseBlock = blockParser({ fence });
|
||||
const parseSpec = specParser({ tokenizers });
|
||||
const joinDescription = getDescriptionJoiner(spacing);
|
||||
return function (source) {
|
||||
const blocks = [];
|
||||
for (const line of splitLines(source)) {
|
||||
const lines = parseSource(line);
|
||||
if (lines === null)
|
||||
continue;
|
||||
const sections = parseBlock(lines);
|
||||
const specs = sections.slice(1).map(parseSpec);
|
||||
blocks.push({
|
||||
description: joinDescription(sections[0], markers),
|
||||
tags: specs,
|
||||
source: lines,
|
||||
problems: specs.reduce((acc, spec) => acc.concat(spec.problems), []),
|
||||
});
|
||||
}
|
||||
return blocks;
|
||||
};
|
||||
}
|
||||
+7
@@ -0,0 +1,7 @@
|
||||
import { Line, BlockMarkers } from '../primitives.js';
|
||||
export interface Options {
|
||||
startLine: number;
|
||||
markers: BlockMarkers;
|
||||
}
|
||||
export type Parser = (source: string) => Line[] | null;
|
||||
export default function getParser({ startLine, markers, }?: Partial<Options>): Parser;
|
||||
+46
@@ -0,0 +1,46 @@
|
||||
import { Markers } from '../primitives.js';
|
||||
import { seedTokens, splitSpace, splitCR } from '../util.js';
|
||||
export default function getParser({ startLine = 0, markers = Markers, } = {}) {
|
||||
let block = null;
|
||||
let num = startLine;
|
||||
return function parseSource(source) {
|
||||
let rest = source;
|
||||
const tokens = seedTokens();
|
||||
[tokens.lineEnd, rest] = splitCR(rest);
|
||||
[tokens.start, rest] = splitSpace(rest);
|
||||
if (block === null &&
|
||||
rest.startsWith(markers.start) &&
|
||||
!rest.startsWith(markers.nostart)) {
|
||||
block = [];
|
||||
tokens.delimiter = rest.slice(0, markers.start.length);
|
||||
rest = rest.slice(markers.start.length);
|
||||
[tokens.postDelimiter, rest] = splitSpace(rest);
|
||||
}
|
||||
if (block === null) {
|
||||
num++;
|
||||
return null;
|
||||
}
|
||||
const isClosed = rest.trimRight().endsWith(markers.end);
|
||||
if (tokens.delimiter === '' &&
|
||||
rest.startsWith(markers.delim) &&
|
||||
!rest.startsWith(markers.end)) {
|
||||
tokens.delimiter = markers.delim;
|
||||
rest = rest.slice(markers.delim.length);
|
||||
[tokens.postDelimiter, rest] = splitSpace(rest);
|
||||
}
|
||||
if (isClosed) {
|
||||
const trimmed = rest.trimRight();
|
||||
tokens.end = rest.slice(trimmed.length - markers.end.length);
|
||||
rest = trimmed.slice(0, -markers.end.length);
|
||||
}
|
||||
tokens.description = rest;
|
||||
block.push({ number: num, source, tokens });
|
||||
num++;
|
||||
if (isClosed) {
|
||||
const result = block.slice();
|
||||
block = null;
|
||||
return result;
|
||||
}
|
||||
return null;
|
||||
};
|
||||
}
|
||||
+7
@@ -0,0 +1,7 @@
|
||||
import { Line, Spec } from '../primitives.js';
|
||||
import { Tokenizer } from './tokenizers/index.js';
|
||||
export type Parser = (source: Line[]) => Spec;
|
||||
export interface Options {
|
||||
tokenizers: Tokenizer[];
|
||||
}
|
||||
export default function getParser({ tokenizers }: Options): Parser;
|
||||
+13
@@ -0,0 +1,13 @@
|
||||
import { seedSpec } from '../util.js';
|
||||
export default function getParser({ tokenizers }) {
|
||||
return function parseSpec(source) {
|
||||
var _a;
|
||||
let spec = seedSpec({ source });
|
||||
for (const tokenize of tokenizers) {
|
||||
spec = tokenize(spec);
|
||||
if ((_a = spec.problems[spec.problems.length - 1]) === null || _a === void 0 ? void 0 : _a.critical)
|
||||
break;
|
||||
}
|
||||
return spec;
|
||||
};
|
||||
}
|
||||
+20
@@ -0,0 +1,20 @@
|
||||
import { Line, BlockMarkers, Markers } from '../../primitives.js';
|
||||
import { Tokenizer } from './index.js';
|
||||
/**
|
||||
* Walks over provided lines joining description token into a single string.
|
||||
* */
|
||||
export type Joiner = (lines: Line[], markers?: BlockMarkers) => string;
|
||||
/**
|
||||
* Shortcut for standard Joiners
|
||||
* compact - strip surrounding whitespace and concat lines using a single string
|
||||
* preserve - preserves original whitespace and line breaks as is
|
||||
*/
|
||||
export type Spacing = 'compact' | 'preserve' | Joiner;
|
||||
/**
|
||||
* Makes no changes to `spec.lines[].tokens` but joins them into `spec.description`
|
||||
* following given spacing srtategy
|
||||
* @param {Spacing} spacing tells how to handle the whitespace
|
||||
* @param {BlockMarkers} markers tells how to handle comment block delimitation
|
||||
*/
|
||||
export default function descriptionTokenizer(spacing?: Spacing, markers?: typeof Markers): Tokenizer;
|
||||
export declare function getJoiner(spacing: Spacing): Joiner;
|
||||
+47
@@ -0,0 +1,47 @@
|
||||
import { Markers } from '../../primitives.js';
|
||||
/**
|
||||
* Makes no changes to `spec.lines[].tokens` but joins them into `spec.description`
|
||||
* following given spacing srtategy
|
||||
* @param {Spacing} spacing tells how to handle the whitespace
|
||||
* @param {BlockMarkers} markers tells how to handle comment block delimitation
|
||||
*/
|
||||
export default function descriptionTokenizer(spacing = 'compact', markers = Markers) {
|
||||
const join = getJoiner(spacing);
|
||||
return (spec) => {
|
||||
spec.description = join(spec.source, markers);
|
||||
return spec;
|
||||
};
|
||||
}
|
||||
export function getJoiner(spacing) {
|
||||
if (spacing === 'compact')
|
||||
return compactJoiner;
|
||||
if (spacing === 'preserve')
|
||||
return preserveJoiner;
|
||||
return spacing;
|
||||
}
|
||||
function compactJoiner(lines, markers = Markers) {
|
||||
return lines
|
||||
.map(({ tokens: { description } }) => description.trim())
|
||||
.filter((description) => description !== '')
|
||||
.join(' ');
|
||||
}
|
||||
const lineNo = (num, { tokens }, i) => tokens.type === '' ? num : i;
|
||||
const getDescription = ({ tokens }) => (tokens.delimiter === '' ? tokens.start : tokens.postDelimiter.slice(1)) +
|
||||
tokens.description;
|
||||
function preserveJoiner(lines, markers = Markers) {
|
||||
if (lines.length === 0)
|
||||
return '';
|
||||
// skip the opening line with no description
|
||||
if (lines[0].tokens.description === '' &&
|
||||
lines[0].tokens.delimiter === markers.start)
|
||||
lines = lines.slice(1);
|
||||
// skip the closing line with no description
|
||||
const lastLine = lines[lines.length - 1];
|
||||
if (lastLine !== undefined &&
|
||||
lastLine.tokens.description === '' &&
|
||||
lastLine.tokens.end.endsWith(markers.end))
|
||||
lines = lines.slice(0, -1);
|
||||
// description starts at the last line of type definition
|
||||
lines = lines.slice(lines.reduce(lineNo, 0));
|
||||
return lines.map(getDescription).join('\n');
|
||||
}
|
||||
+7
@@ -0,0 +1,7 @@
|
||||
import { Spec } from '../../primitives.js';
|
||||
/**
|
||||
* Splits `spect.lines[].token.description` into other tokens,
|
||||
* and populates the spec.{tag, name, type, description}. Invoked in a chaing
|
||||
* with other tokens, operations listed above can be moved to separate tokenizers
|
||||
*/
|
||||
export type Tokenizer = (spec: Spec) => Spec;
|
||||
+1
@@ -0,0 +1 @@
|
||||
export {};
|
||||
+6
@@ -0,0 +1,6 @@
|
||||
import { Tokenizer } from './index.js';
|
||||
/**
|
||||
* Splits remaining `spec.lines[].tokens.description` into `name` and `descriptions` tokens,
|
||||
* and populates the `spec.name`
|
||||
*/
|
||||
export default function nameTokenizer(): Tokenizer;
|
||||
+138
@@ -0,0 +1,138 @@
|
||||
import { splitSpace, isSpace } from '../../util.js';
|
||||
const isQuoted = (s) => s && s.startsWith('"') && s.endsWith('"');
|
||||
/**
|
||||
* Splits remaining `spec.lines[].tokens.description` into `name` and `descriptions` tokens,
|
||||
* and populates the `spec.name`
|
||||
*/
|
||||
export default function nameTokenizer() {
|
||||
const typeEnd = (num, { tokens }, i) => tokens.type === '' ? num : i;
|
||||
return (spec) => {
|
||||
// look for the name starting in the line where {type} ends
|
||||
let finalTypeLine = spec.source.reduce(typeEnd, 0);
|
||||
let tokens;
|
||||
if (spec.type) {
|
||||
do {
|
||||
({ tokens } = spec.source[finalTypeLine]);
|
||||
if (tokens.description.trim()) {
|
||||
break;
|
||||
}
|
||||
finalTypeLine++;
|
||||
} while (spec.source[finalTypeLine]);
|
||||
}
|
||||
else {
|
||||
({ tokens } = spec.source[finalTypeLine]);
|
||||
}
|
||||
const source = tokens.description.trimStart();
|
||||
const quotedGroups = source.split('"');
|
||||
// if it starts with quoted group, assume it is a literal
|
||||
if (quotedGroups.length > 1 &&
|
||||
quotedGroups[0] === '' &&
|
||||
quotedGroups.length % 2 === 1) {
|
||||
spec.name = quotedGroups[1];
|
||||
tokens.name = `"${quotedGroups[1]}"`;
|
||||
[tokens.postName, tokens.description] = splitSpace(source.slice(tokens.name.length));
|
||||
return spec;
|
||||
}
|
||||
let brackets = 0;
|
||||
let name = '';
|
||||
let optional = false;
|
||||
let defaultValue;
|
||||
// assume name is non-space string or anything wrapped into brackets
|
||||
for (const ch of source) {
|
||||
if (brackets === 0 && isSpace(ch))
|
||||
break;
|
||||
if (ch === '[')
|
||||
brackets++;
|
||||
if (ch === ']')
|
||||
brackets--;
|
||||
name += ch;
|
||||
}
|
||||
if (brackets !== 0) {
|
||||
spec.problems.push({
|
||||
code: 'spec:name:unpaired-brackets',
|
||||
message: 'unpaired brackets',
|
||||
line: spec.source[0].number,
|
||||
critical: true,
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
const nameToken = name;
|
||||
if (name[0] === '[' && name[name.length - 1] === ']') {
|
||||
optional = true;
|
||||
name = name.slice(1, -1);
|
||||
const parts = name.split('=');
|
||||
name = parts[0].trim();
|
||||
if (parts[1] !== undefined)
|
||||
defaultValue = parts.slice(1).join('=').trim();
|
||||
if (name === '') {
|
||||
spec.problems.push({
|
||||
code: 'spec:name:empty-name',
|
||||
message: 'empty name',
|
||||
line: spec.source[0].number,
|
||||
critical: true,
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
if (defaultValue === '') {
|
||||
spec.problems.push({
|
||||
code: 'spec:name:empty-default',
|
||||
message: 'empty default value',
|
||||
line: spec.source[0].number,
|
||||
critical: true,
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
// has "=" and is not a string, except for "=>"
|
||||
if (!isQuoted(defaultValue) && /=(?!>)/.test(defaultValue)) {
|
||||
spec.problems.push({
|
||||
code: 'spec:name:invalid-default',
|
||||
message: 'invalid default value syntax',
|
||||
line: spec.source[0].number,
|
||||
critical: true,
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
}
|
||||
if (!optional) {
|
||||
const eqIndex = name.search(/=(?!>)/);
|
||||
if (eqIndex !== -1) {
|
||||
defaultValue = name.slice(eqIndex + 1).trim();
|
||||
name = name.slice(0, eqIndex).trim();
|
||||
if (name === '') {
|
||||
spec.problems.push({
|
||||
code: 'spec:name:empty-name',
|
||||
message: 'empty name',
|
||||
line: spec.source[0].number,
|
||||
critical: true,
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
if (defaultValue === '') {
|
||||
spec.problems.push({
|
||||
code: 'spec:name:empty-default',
|
||||
message: 'empty default value',
|
||||
line: spec.source[0].number,
|
||||
critical: true,
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
if (!isQuoted(defaultValue) && /=(?!>)/.test(defaultValue)) {
|
||||
spec.problems.push({
|
||||
code: 'spec:name:invalid-default',
|
||||
message: 'invalid default value syntax',
|
||||
line: spec.source[0].number,
|
||||
critical: true,
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
}
|
||||
}
|
||||
spec.optional = optional;
|
||||
spec.name = name;
|
||||
tokens.name = nameToken;
|
||||
if (defaultValue !== undefined)
|
||||
spec.default = defaultValue;
|
||||
[tokens.postName, tokens.description] = splitSpace(source.slice(tokens.name.length));
|
||||
return spec;
|
||||
};
|
||||
}
|
||||
+6
@@ -0,0 +1,6 @@
|
||||
import { Tokenizer } from './index.js';
|
||||
/**
|
||||
* Splits the `@prefix` from remaining `Spec.lines[].token.description` into the `tag` token,
|
||||
* and populates `spec.tag`
|
||||
*/
|
||||
export default function tagTokenizer(): Tokenizer;
|
||||
+27
@@ -0,0 +1,27 @@
|
||||
/**
|
||||
* Splits the `@prefix` from remaining `Spec.lines[].token.description` into the `tag` token,
|
||||
* and populates `spec.tag`
|
||||
*/
|
||||
export default function tagTokenizer() {
|
||||
return (spec) => {
|
||||
const { tokens } = spec.source[0];
|
||||
const match = tokens.description.match(/\s*(@(\S+))(\s*)/);
|
||||
if (match === null) {
|
||||
spec.problems.push({
|
||||
code: 'spec:tag:prefix',
|
||||
message: 'tag should start with "@" symbol',
|
||||
line: spec.source[0].number,
|
||||
critical: true,
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
if (match[1].includes('/')) {
|
||||
return spec;
|
||||
}
|
||||
tokens.tag = match[1];
|
||||
tokens.postTag = match[3];
|
||||
tokens.description = tokens.description.slice(match[0].length);
|
||||
spec.tag = match[2];
|
||||
return spec;
|
||||
};
|
||||
}
|
||||
+27
@@ -0,0 +1,27 @@
|
||||
import { Tokenizer } from './index.js';
|
||||
/**
|
||||
* Joiner is a function taking collected type token string parts,
|
||||
* and joining them together. In most of the cases this will be
|
||||
 * a single piece like {type-name}, but a type may span multiple lines
|
||||
* ```
|
||||
* @tag {function(
|
||||
* number,
|
||||
* string
|
||||
* )}
|
||||
* ```
|
||||
*/
|
||||
export type Joiner = (parts: string[]) => string;
|
||||
/**
|
||||
* Shortcut for standard Joiners
|
||||
* compact - trim surrounding space, replace line breaks with a single space
|
||||
* preserve - concat as is
|
||||
*/
|
||||
export type Spacing = 'compact' | 'preserve' | Joiner;
|
||||
/**
|
||||
 * Splits the remaining `Spec.lines[].tokens.description` into `type` and `description`
|
||||
* tokens and populates Spec.type`
|
||||
*
|
||||
* @param {Spacing} spacing tells how to deal with a whitespace
|
||||
* for type values going over multiple lines
|
||||
*/
|
||||
export default function typeTokenizer(spacing?: Spacing): Tokenizer;
|
||||
+77
@@ -0,0 +1,77 @@
|
||||
import { splitSpace } from '../../util.js';
|
||||
/**
 * Splits the remaining `Spec.lines[].tokens.description` into `type` and `description`
 * tokens and populates `spec.type`
 *
 * @param {Spacing} spacing tells how to deal with a whitespace
 * for type values going over multiple lines
 */
export default function typeTokenizer(spacing = 'compact') {
    const join = getJoiner(spacing);
    return (spec) => {
        // Balance of '{' vs '}' seen so far; the type ends when it returns to 0.
        let curlies = 0;
        // [tokens, collected-type-chunk] pairs, one entry per line the type spans.
        let lines = [];
        // Lines before the first non-blank description are skipped.
        let descriptionBegun = false;
        let firstTypeIteration = true;
        for (const { tokens } of spec.source.values()) {
            let type = '';
            if (!descriptionBegun && tokens.description.trim()) {
                descriptionBegun = true;
            }
            else if (!descriptionBegun) {
                continue;
            }
            // A type must open with '{' on its first non-blank line; otherwise
            // there is no type at all and the spec is returned unchanged.
            if (firstTypeIteration && tokens.description[0] !== '{')
                return spec;
            firstTypeIteration = false;
            // Consume characters until the opening curly is balanced out.
            for (const ch of tokens.description) {
                if (ch === '{')
                    curlies++;
                if (ch === '}')
                    curlies--;
                type += ch;
                if (curlies === 0)
                    break;
            }
            lines.push([tokens, type]);
            if (curlies === 0)
                break;
        }
        if (!descriptionBegun) {
            return spec;
        }
        // Ran out of lines with curlies still open — report and abort.
        if (curlies !== 0) {
            spec.problems.push({
                code: 'spec:type:unpaired-curlies',
                message: 'unpaired curlies',
                line: spec.source[0].number,
                critical: true,
            });
            return spec;
        }
        const parts = [];
        // Continuation lines donate the indent beyond the first line's
        // postDelimiter into the type token itself.
        const offset = lines[0][0].postDelimiter.length;
        for (const [i, [tokens, type]] of lines.entries()) {
            tokens.type = type;
            if (i > 0) {
                tokens.type = tokens.postDelimiter.slice(offset) + type;
                tokens.postDelimiter = tokens.postDelimiter.slice(0, offset);
            }
            [tokens.postType, tokens.description] = splitSpace(tokens.description.slice(type.length));
            parts.push(tokens.type);
        }
        // Strip the surrounding '{' and '}' before joining the parts.
        parts[0] = parts[0].slice(1);
        parts[parts.length - 1] = parts[parts.length - 1].slice(0, -1);
        spec.type = join(parts);
        return spec;
    };
}
|
||||
const trim = (x) => x.trim();
|
||||
function getJoiner(spacing) {
|
||||
if (spacing === 'compact')
|
||||
return (t) => t.map(trim).join('');
|
||||
else if (spacing === 'preserve')
|
||||
return (t) => t.join('\n');
|
||||
else
|
||||
return spacing;
|
||||
}
|
||||
+54
@@ -0,0 +1,54 @@
|
||||
/** Standard JSDoc comment-block markers. @deprecated pass a custom BlockMarkers object instead */
export declare enum Markers {
    start = "/**",
    nostart = "/***",
    delim = "*",
    end = "*/"
}
/** Set of markers delimiting a comment block and its lines. */
export interface BlockMarkers {
    start: string;
    nostart: string;
    delim: string;
    end: string;
}
/** One parsed comment block: leading description, its tags, raw lines, and any problems. */
export interface Block {
    description: string;
    tags: Spec[];
    source: Line[];
    problems: Problem[];
}
/** One parsed `@tag` with its name, type, optional default, and description. */
export interface Spec {
    tag: string;
    name: string;
    default?: string;
    type: string;
    optional: boolean;
    description: string;
    problems: Problem[];
    source: Line[];
}
/** One raw source line together with its token breakdown. */
export interface Line {
    number: number;
    source: string;
    tokens: Tokens;
}
/**
 * Character-preserving split of a single line; the stringifier rebuilds the
 * line by concatenating every field in this order.
 */
export interface Tokens {
    start: string;
    delimiter: string;
    postDelimiter: string;
    tag: string;
    postTag: string;
    name: string;
    postName: string;
    type: string;
    postType: string;
    description: string;
    end: string;
    lineEnd: string;
}
/** A parsing issue reported by a tokenizer; tokenizers return early after pushing one with `critical: true`. */
export interface Problem {
    code: 'unhandled' | 'custom' | 'source:startline' | 'spec:tag:prefix' | 'spec:type:unpaired-curlies' | 'spec:name:unpaired-brackets' | 'spec:name:empty-name' | 'spec:name:invalid-default' | 'spec:name:empty-default';
    message: string;
    line: number;
    critical: boolean;
}
|
||||
+8
@@ -0,0 +1,8 @@
|
||||
/**
 * Standard JSDoc comment-block markers.
 * @deprecated pass a custom BlockMarkers object instead
 */
export var Markers = {
    start: '/**',
    nostart: '/***',
    delim: '*',
    end: '*/',
};
|
||||
+3
@@ -0,0 +1,3 @@
|
||||
import { Block } from '../primitives.js';
|
||||
export type Stringifier = (block: Block) => string;
|
||||
export default function getStringifier(): Stringifier;
|
||||
+17
@@ -0,0 +1,17 @@
|
||||
function join(tokens) {
|
||||
return (tokens.start +
|
||||
tokens.delimiter +
|
||||
tokens.postDelimiter +
|
||||
tokens.tag +
|
||||
tokens.postTag +
|
||||
tokens.type +
|
||||
tokens.postType +
|
||||
tokens.name +
|
||||
tokens.postName +
|
||||
tokens.description +
|
||||
tokens.end +
|
||||
tokens.lineEnd);
|
||||
}
|
||||
export default function getStringifier() {
|
||||
return (block) => block.source.map(({ tokens }) => join(tokens)).join('\n');
|
||||
}
|
||||
+2
@@ -0,0 +1,2 @@
|
||||
import { Block } from '../primitives.js';
|
||||
export default function inspect({ source }: Block): string;
|
||||
+44
@@ -0,0 +1,44 @@
|
||||
import { isSpace } from '../util.js';
|
||||
// Minimum width per printed column; `line` is the line-number column and the
// rest mirror the Tokens fields in display order.
const zeroWidth = {
    line: 0,
    start: 0,
    delimiter: 0,
    postDelimiter: 0,
    tag: 0,
    postTag: 0,
    name: 0,
    postName: 0,
    type: 0,
    postType: 0,
    description: 0,
    end: 0,
    lineEnd: 0,
};
// Column-title overrides; all other columns are titled by their field name.
const headers = { lineEnd: 'CR' };
const fields = Object.keys(zeroWidth);
// Whitespace-only cell values are shown as "{length}" so they stay visible.
const repr = (x) => (isSpace(x) ? `{${x.length}}` : x);
// Wraps one row's cells in pipe separators.
const frame = (line) => '|' + line.join('|') + '|';
// Pads every token of a row to its column width.
const align = (width, tokens) => Object.keys(tokens).map((k) => repr(tokens[k]).padEnd(width[k]));
/**
 * Renders a parsed block's source lines as a pipe-framed debug table:
 * a header row, a dashed separator row, then one row per source line.
 * Returns '' for an empty block.
 */
export default function inspect({ source }) {
    var _a, _b;
    if (source.length === 0)
        return '';
    // Start each column at its header width, then grow to fit the widest cell.
    const width = Object.assign({}, zeroWidth);
    for (const f of fields)
        width[f] = ((_a = headers[f]) !== null && _a !== void 0 ? _a : f).length;
    for (const { number, tokens } of source) {
        width.line = Math.max(width.line, number.toString().length);
        for (const k in tokens)
            width[k] = Math.max(width[k], repr(tokens[k]).length);
    }
    // lines[0] = header cells, lines[1] = dashes, then one row per source line.
    const lines = [[], []];
    for (const f of fields)
        lines[0].push(((_b = headers[f]) !== null && _b !== void 0 ? _b : f).padEnd(width[f]));
    for (const f of fields)
        lines[1].push('-'.padEnd(width[f], '-'));
    for (const { number, tokens } of source) {
        const line = number.toString().padStart(width.line);
        lines.push([line, ...align(width, tokens)]);
    }
    return lines.map(frame).join('\n');
}
|
||||
+3
@@ -0,0 +1,3 @@
|
||||
import { Transform } from './index.js';
|
||||
import { Markers } from '../primitives.js';
|
||||
export default function align(markers?: typeof Markers): Transform;
|
||||
+93
@@ -0,0 +1,93 @@
|
||||
// TypeScript-emitted helper (tslib `__rest`): returns a copy of `s` omitting
// the keys listed in `e` — emulates object rest destructuring
// (`const { source, ...fields } = block`) for older compile targets.
var __rest = (this && this.__rest) || function (s, e) {
    var t = {};
    for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
        t[p] = s[p];
    if (s != null && typeof Object.getOwnPropertySymbols === "function")
        for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
            if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
                t[p[i]] = s[p[i]];
        }
    return t;
};
|
||||
import { Markers } from '../primitives.js';
|
||||
import { rewireSource } from '../util.js';
|
||||
// Minimum widths of the columns that get vertically aligned across a block.
const zeroWidth = {
    start: 0,
    tag: 0,
    type: 0,
    name: 0,
};
// Reducer computing per-column widths over all lines: tag/type/name are
// maximized, while `start` is taken from the line carrying the opening marker.
const getWidth = (markers = Markers) => (w, { tokens: t }) => ({
    start: t.delimiter === markers.start ? t.start.length : w.start,
    tag: Math.max(w.tag, t.tag.length),
    type: Math.max(w.type, t.type.length),
    name: Math.max(w.name, t.name.length),
});
// Builds a run of `len` spaces.
const space = (len) => ''.padStart(len, ' ');
/**
 * Builds a Transform that vertically aligns delimiters, tags, types and
 * names across all lines of a block.
 * @param markers comment markers to recognize; defaults to the standard set
 */
export default function align(markers = Markers) {
    // Flips to true once the first tagged line is seen; description-only
    // lines before that are padded differently (see the !intoTags branch).
    let intoTags = false;
    // Per-block column widths, computed in the returned transform before
    // `update` runs on each line.
    let w;
    function update(line) {
        const tokens = Object.assign({}, line.tokens);
        if (tokens.tag !== '')
            intoTags = true;
        const isEmpty = tokens.tag === '' &&
            tokens.name === '' &&
            tokens.type === '' &&
            tokens.description === '';
        // dangling '*/'
        if (tokens.end === markers.end && isEmpty) {
            tokens.start = space(w.start + 1);
            return Object.assign(Object.assign({}, line), { tokens });
        }
        // Re-pad `start` according to which marker the line carries.
        switch (tokens.delimiter) {
            case markers.start:
                tokens.start = space(w.start);
                break;
            case markers.delim:
                tokens.start = space(w.start + 1);
                break;
            default:
                tokens.delimiter = '';
                tokens.start = space(w.start + 2); // compensate delimiter
        }
        if (!intoTags) {
            tokens.postDelimiter = tokens.description === '' ? '' : ' ';
            return Object.assign(Object.assign({}, line), { tokens });
        }
        // Tracks which trailing columns are empty so no padding is emitted
        // after the last non-empty one.
        const nothingAfter = {
            delim: false,
            tag: false,
            type: false,
            name: false,
        };
        if (tokens.description === '') {
            nothingAfter.name = true;
            tokens.postName = '';
            if (tokens.name === '') {
                nothingAfter.type = true;
                tokens.postType = '';
                if (tokens.type === '') {
                    nothingAfter.tag = true;
                    tokens.postTag = '';
                    if (tokens.tag === '') {
                        nothingAfter.delim = true;
                    }
                }
            }
        }
        tokens.postDelimiter = nothingAfter.delim ? '' : ' ';
        if (!nothingAfter.tag)
            tokens.postTag = space(w.tag - tokens.tag.length + 1);
        if (!nothingAfter.type)
            tokens.postType = space(w.type - tokens.type.length + 1);
        if (!nothingAfter.name)
            tokens.postName = space(w.name - tokens.name.length + 1);
        return Object.assign(Object.assign({}, line), { tokens });
    }
    return (_a) => {
        var { source } = _a, fields = __rest(_a, ["source"]);
        w = source.reduce(getWidth(markers), Object.assign({}, zeroWidth));
        return rewireSource(Object.assign(Object.assign({}, fields), { source: source.map(update) }));
    };
}
|
||||
+3
@@ -0,0 +1,3 @@
|
||||
import { Transform } from './index.js';
|
||||
export type Ending = 'LF' | 'CRLF';
|
||||
export default function crlf(ending: Ending): Transform;
|
||||
+34
@@ -0,0 +1,34 @@
|
||||
// TypeScript-emitted helper (tslib `__rest`): returns a copy of `s` omitting
// the keys listed in `e` — emulates object rest destructuring
// (`const { source, ...fields } = block`) for older compile targets.
var __rest = (this && this.__rest) || function (s, e) {
    var t = {};
    for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
        t[p] = s[p];
    if (s != null && typeof Object.getOwnPropertySymbols === "function")
        for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
            if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
                t[p[i]] = s[p[i]];
        }
    return t;
};
|
||||
import { rewireSource } from '../util.js';
|
||||
// Token field names listed from last to first in rendered order.
// NOTE(review): `order` is not referenced by any code visible in this module —
// it looks like a leftover; confirm against history before removing.
const order = [
    'end',
    'description',
    'postType',
    'type',
    'postName',
    'name',
    'postTag',
    'tag',
    'postDelimiter',
    'delimiter',
    'start',
];
|
||||
/**
 * Builds a Transform that normalizes every line's terminator token:
 * 'LF' clears the carried CR token, 'CRLF' sets it to '\r'.
 */
export default function crlf(ending) {
    const lineEnd = ending === 'LF' ? '' : '\r';
    function update(line) {
        const tokens = Object.assign({}, line.tokens, { lineEnd });
        return Object.assign({}, line, { tokens });
    }
    return (_a) => {
        var { source } = _a, fields = __rest(_a, ["source"]);
        return rewireSource(Object.assign(Object.assign({}, fields), { source: source.map(update) }));
    };
}
|
||||
+2
@@ -0,0 +1,2 @@
|
||||
import { Transform } from './index.js';
|
||||
export default function indent(pos: number): Transform;
|
||||
+32
@@ -0,0 +1,32 @@
|
||||
// TypeScript-emitted helper (tslib `__rest`): returns a copy of `s` omitting
// the keys listed in `e` — emulates object rest destructuring
// (`const { source, ...fields } = block`) for older compile targets.
var __rest = (this && this.__rest) || function (s, e) {
    var t = {};
    for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
        t[p] = s[p];
    if (s != null && typeof Object.getOwnPropertySymbols === "function")
        for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
            if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
                t[p[i]] = s[p[i]];
        }
    return t;
};
|
||||
import { rewireSource } from '../util.js';
|
||||
// Removes `offset` leading characters from a string.
const pull = (offset) => (str) => str.slice(offset);
// Appends `offset` spaces to a string.
const push = (offset) => {
    const space = ''.padStart(offset, ' ');
    return (str) => str + space;
};
/**
 * Builds a Transform that shifts a block horizontally so its leading
 * `start` padding becomes `pos` characters wide.
 * @param pos target indentation width in characters
 */
export default function indent(pos) {
    // Direction and amount of the shift, decided once from the first line seen.
    // NOTE(review): `shift` lives in the indent() closure, so a single transform
    // instance reuses the shift computed from the first block it processes for
    // every later block — confirm this sharing is intentional before relying on it.
    let shift;
    const pad = (start) => {
        if (shift === undefined) {
            const offset = pos - start.length;
            shift = offset > 0 ? push(offset) : pull(-offset);
        }
        return shift(start);
    };
    const update = (line) => (Object.assign(Object.assign({}, line), { tokens: Object.assign(Object.assign({}, line.tokens), { start: pad(line.tokens.start) }) }));
    return (_a) => {
        var { source } = _a, fields = __rest(_a, ["source"]);
        return rewireSource(Object.assign(Object.assign({}, fields), { source: source.map(update) }));
    };
}
|
||||
+3
@@ -0,0 +1,3 @@
|
||||
import { Block } from '../primitives.js';
|
||||
export type Transform = (Block: Block) => Block;
|
||||
export declare function flow(...transforms: Transform[]): Transform;
|
||||
+3
@@ -0,0 +1,3 @@
|
||||
export function flow(...transforms) {
|
||||
return (block) => transforms.reduce((block, t) => t(block), block);
|
||||
}
|
||||
+21
@@ -0,0 +1,21 @@
|
||||
import { Block, Tokens, Spec } from './primitives.js';
|
||||
export declare function isSpace(source: string): boolean;
|
||||
export declare function hasCR(source: string): boolean;
|
||||
export declare function splitCR(source: string): [string, string];
|
||||
export declare function splitSpace(source: string): [string, string];
|
||||
export declare function splitLines(source: string): string[];
|
||||
export declare function seedBlock(block?: Partial<Block>): Block;
|
||||
export declare function seedSpec(spec?: Partial<Spec>): Spec;
|
||||
export declare function seedTokens(tokens?: Partial<Tokens>): Tokens;
|
||||
/**
|
||||
* Assures Block.tags[].source contains references to the Block.source items,
|
||||
* using Block.source as a source of truth. This is a counterpart of rewireSpecs
|
||||
 * @param block parsed comments block
|
||||
*/
|
||||
export declare function rewireSource(block: Block): Block;
|
||||
/**
|
||||
* Assures Block.source contains references to the Block.tags[].source items,
|
||||
* using Block.tags[].source as a source of truth. This is a counterpart of rewireSource
|
||||
 * @param block parsed comments block
|
||||
*/
|
||||
export declare function rewireSpecs(block: Block): Block;
|
||||
+52
@@ -0,0 +1,52 @@
|
||||
// Matches strings made of one or more whitespace characters only.
const WHITESPACE_ONLY = /^\s+$/;

/** True when `source` is non-empty and contains nothing but whitespace. */
export function isSpace(source) {
    return WHITESPACE_ONLY.test(source);
}
|
||||
/** True when the string ends with a carriage-return character. */
export function hasCR(source) {
    return source.endsWith('\r');
}
|
||||
/**
 * Splits a trailing run of CR characters off a string.
 * Returns [crRun, rest]; crRun is '' when the string has no trailing CRs.
 */
export function splitCR(source) {
    const found = /\r+$/.exec(source);
    if (found === null) {
        return ['', source];
    }
    const cut = source.length - found[0].length;
    return [source.slice(cut), source.slice(0, cut)];
}
|
||||
/**
 * Splits leading whitespace off a string.
 * Returns [leadingSpace, rest]; leadingSpace is '' when none is present.
 */
export function splitSpace(source) {
    const found = /^\s+/.exec(source);
    if (found === null) {
        return ['', source];
    }
    const boundary = found[0].length;
    return [source.slice(0, boundary), source.slice(boundary)];
}
|
||||
/** Splits text into lines on '\n' (CRs are preserved — see splitCR). */
export function splitLines(source) {
    return source.split('\n');
}
|
||||
export function seedBlock(block = {}) {
|
||||
return Object.assign({ description: '', tags: [], source: [], problems: [] }, block);
|
||||
}
|
||||
export function seedSpec(spec = {}) {
|
||||
return Object.assign({ tag: '', name: '', type: '', optional: false, description: '', problems: [], source: [] }, spec);
|
||||
}
|
||||
export function seedTokens(tokens = {}) {
|
||||
return Object.assign({ start: '', delimiter: '', postDelimiter: '', tag: '', postTag: '', name: '', postName: '', type: '', postType: '', description: '', end: '', lineEnd: '' }, tokens);
|
||||
}
|
||||
/**
|
||||
* Assures Block.tags[].source contains references to the Block.source items,
|
||||
* using Block.source as a source of truth. This is a counterpart of rewireSpecs
|
||||
* @param block parsed coments block
|
||||
*/
|
||||
export function rewireSource(block) {
|
||||
const source = block.source.reduce((acc, line) => acc.set(line.number, line), new Map());
|
||||
for (const spec of block.tags) {
|
||||
spec.source = spec.source.map((line) => source.get(line.number));
|
||||
}
|
||||
return block;
|
||||
}
|
||||
/**
|
||||
* Assures Block.source contains references to the Block.tags[].source items,
|
||||
* using Block.tags[].source as a source of truth. This is a counterpart of rewireSource
|
||||
* @param block parsed coments block
|
||||
*/
|
||||
export function rewireSpecs(block) {
|
||||
const source = block.tags.reduce((acc, spec) => spec.source.reduce((acc, line) => acc.set(line.number, line), acc), new Map());
|
||||
block.source = block.source.map((line) => source.get(line.number) || line);
|
||||
return block;
|
||||
}
|
||||
+207
@@ -0,0 +1,207 @@
|
||||
'use strict';
|
||||
|
||||
// For a detailed explanation regarding each configuration property, visit:
|
||||
// https://jestjs.io/docs/en/configuration.html
|
||||
|
||||
const { compilerOptions: tsconfig } = JSON.parse(
|
||||
require('fs').readFileSync('./tsconfig.node.json')
|
||||
);
|
||||
|
||||
module.exports = {
|
||||
// All imported modules in your tests should be mocked automatically
|
||||
// automock: false,
|
||||
|
||||
// Stop running tests after `n` failures
|
||||
// bail: 0,
|
||||
|
||||
// The directory where Jest should store its cached dependency information
|
||||
// cacheDirectory: "/private/var/folders/_g/g97k3tbx31x08qqy2z18kxq80000gn/T/jest_dx",
|
||||
|
||||
// Automatically clear mock calls and instances between every test
|
||||
// clearMocks: false,
|
||||
|
||||
// Indicates whether the coverage information should be collected while executing the test
|
||||
collectCoverage: true,
|
||||
|
||||
// An array of glob patterns indicating a set of files for which coverage information should be collected
|
||||
// collectCoverageFrom: undefined,
|
||||
|
||||
// The directory where Jest should output its coverage files
|
||||
// coverageDirectory: ".coverage",
|
||||
|
||||
// An array of regexp pattern strings used to skip coverage collection
|
||||
coveragePathIgnorePatterns: ['/node_modules/', '/lib/', '/tests/'],
|
||||
|
||||
// Indicates which provider should be used to instrument code for coverage
|
||||
coverageProvider: 'v8',
|
||||
|
||||
// A list of reporter names that Jest uses when writing coverage reports
|
||||
// coverageReporters: [
|
||||
// "json",
|
||||
// "text",
|
||||
// "lcov",
|
||||
// "clover"
|
||||
// ],
|
||||
|
||||
// An object that configures minimum threshold enforcement for coverage results
|
||||
// coverageThreshold: {
|
||||
// global : {
|
||||
// branches: 85,
|
||||
// functions: 85,
|
||||
// lines: 85,
|
||||
// statements: 85
|
||||
// }
|
||||
// },
|
||||
|
||||
// A path to a custom dependency extractor
|
||||
// dependencyExtractor: undefined,
|
||||
|
||||
// Make calling deprecated APIs throw helpful error messages
|
||||
// errorOnDeprecated: false,
|
||||
|
||||
// Force coverage collection from ignored files using an array of glob patterns
|
||||
// forceCoverageMatch: [],
|
||||
|
||||
// A path to a module which exports an async function that is triggered once before all test suites
|
||||
// globalSetup: undefined,
|
||||
|
||||
// A path to a module which exports an async function that is triggered once after all test suites
|
||||
// globalTeardown: undefined,
|
||||
|
||||
// A set of global variables that need to be available in all test environments
|
||||
// globals: {},
|
||||
|
||||
// The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers.
|
||||
// maxWorkers: "50%",
|
||||
|
||||
// An array of directory names to be searched recursively up from the requiring module's location
|
||||
// moduleDirectories: [
|
||||
// "node_modules"
|
||||
// ],
|
||||
|
||||
// An array of file extensions your modules use
|
||||
// moduleFileExtensions: [
|
||||
// "js",
|
||||
// "json",
|
||||
// "jsx",
|
||||
// "ts",
|
||||
// "tsx",
|
||||
// "node"
|
||||
// ],
|
||||
|
||||
// A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module
|
||||
moduleNameMapper: {
|
||||
[/(.+)\.js$/.source]: ['$1.js', '$1.ts']
|
||||
},
|
||||
|
||||
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
|
||||
// modulePathIgnorePatterns: [],
|
||||
|
||||
// Activates notifications for test results
|
||||
// notify: false,
|
||||
|
||||
// An enum that specifies notification mode. Requires { notify: true }
|
||||
// notifyMode: "failure-change",
|
||||
|
||||
// A preset that is used as a base for Jest's configuration
|
||||
preset: 'ts-jest',
|
||||
|
||||
// Run tests from one or more projects
|
||||
// projects: undefined,
|
||||
|
||||
// Use this configuration option to add custom reporters to Jest
|
||||
// reporters: undefined,
|
||||
|
||||
// Automatically reset mock state between every test
|
||||
// resetMocks: false,
|
||||
|
||||
// Reset the module registry before running each individual test
|
||||
// resetModules: false,
|
||||
|
||||
// A path to a custom resolver
|
||||
// resolver: undefined,
|
||||
|
||||
// Automatically restore mock state between every test
|
||||
// restoreMocks: false,
|
||||
|
||||
// The root directory that Jest should scan for tests and modules within
|
||||
// rootDir: undefined,
|
||||
|
||||
// A list of paths to directories that Jest should use to search for files in
|
||||
roots: ['<rootDir>/tests/'],
|
||||
|
||||
// Allows you to use a custom runner instead of Jest's default test runner
|
||||
// runner: "jest-runner",
|
||||
|
||||
// The paths to modules that run some code to configure or set up the testing environment before each test
|
||||
// setupFiles: [],
|
||||
|
||||
// A list of paths to modules that run some code to configure or set up the testing framework before each test
|
||||
// setupFilesAfterEnv: [],
|
||||
|
||||
// The number of seconds after which a test is considered as slow and reported as such in the results.
|
||||
// slowTestThreshold: 5,
|
||||
|
||||
// A list of paths to snapshot serializer modules Jest should use for snapshot testing
|
||||
// snapshotSerializers: [],
|
||||
|
||||
// The test environment that will be used for testing
|
||||
testEnvironment: 'node',
|
||||
|
||||
// Options that will be passed to the testEnvironment
|
||||
// testEnvironmentOptions: {},
|
||||
|
||||
// Adds a location field to test results
|
||||
// testLocationInResults: false,
|
||||
|
||||
// The glob patterns Jest uses to detect test files
|
||||
// testMatch: [
|
||||
// "**/__tests__/**/*.[jt]s?(x)",
|
||||
// "**/?(*.)+(spec|test).[tj]s?(x)"
|
||||
// ],
|
||||
|
||||
// An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
|
||||
// testPathIgnorePatterns: [
|
||||
// "/node_modules/"
|
||||
// ],
|
||||
|
||||
// The regexp pattern or array of patterns that Jest uses to detect test files
|
||||
// testRegex: [],
|
||||
|
||||
// This option allows the use of a custom results processor
|
||||
// testResultsProcessor: undefined,
|
||||
|
||||
// This option allows use of a custom test runner
|
||||
// testRunner: "jasmine2",
|
||||
|
||||
// This option sets the URL for the jsdom environment. It is reflected in properties such as location.href
|
||||
// testURL: "http://localhost",
|
||||
|
||||
// Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout"
|
||||
// timers: "real",
|
||||
|
||||
// A map from regular expressions to paths to transformers
|
||||
transform: {
|
||||
'^.+\\.ts$': ['ts-jest', {
|
||||
tsconfig
|
||||
}],
|
||||
},
|
||||
|
||||
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
|
||||
// transformIgnorePatterns: [
|
||||
// "/node_modules/",
|
||||
// "\\.pnp\\.[^\\/]+$"
|
||||
// ],
|
||||
|
||||
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
|
||||
// unmockedModulePathPatterns: undefined,
|
||||
|
||||
// Indicates whether each individual test should be reported during the run
|
||||
// verbose: undefined,
|
||||
|
||||
// An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
|
||||
// watchPathIgnorePatterns: [],
|
||||
|
||||
// Whether to use watchman for file crawling
|
||||
// watchman: true,
|
||||
};
|
||||
+68
@@ -0,0 +1,68 @@
|
||||
"use strict";
|
||||
|
||||
var __createBinding = this && this.__createBinding || (Object.create ? function (o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = {
|
||||
enumerable: true,
|
||||
get: function () {
|
||||
return m[k];
|
||||
}
|
||||
};
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
} : function (o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
});
|
||||
var __exportStar = this && this.__exportStar || function (m, exports) {
|
||||
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.util = exports.tokenizers = exports.transforms = exports.inspect = exports.stringify = void 0;
|
||||
exports.parse = parse;
|
||||
const index_js_1 = require("./parser/index.cjs");
|
||||
const description_js_1 = require("./parser/tokenizers/description.cjs");
|
||||
const name_js_1 = require("./parser/tokenizers/name.cjs");
|
||||
const tag_js_1 = require("./parser/tokenizers/tag.cjs");
|
||||
const type_js_1 = require("./parser/tokenizers/type.cjs");
|
||||
const index_js_2 = require("./stringifier/index.cjs");
|
||||
const align_js_1 = require("./transforms/align.cjs");
|
||||
const indent_js_1 = require("./transforms/indent.cjs");
|
||||
const crlf_js_1 = require("./transforms/crlf.cjs");
|
||||
const index_js_3 = require("./transforms/index.cjs");
|
||||
const util_js_1 = require("./util.cjs");
|
||||
__exportStar(require("./primitives.cjs"), exports);
|
||||
function parse(source, options = {}) {
|
||||
return (0, index_js_1.default)(options)(source);
|
||||
}
|
||||
exports.stringify = (0, index_js_2.default)();
|
||||
var inspect_js_1 = require("./stringifier/inspect.cjs");
|
||||
Object.defineProperty(exports, "inspect", {
|
||||
enumerable: true,
|
||||
get: function () {
|
||||
return inspect_js_1.default;
|
||||
}
|
||||
});
|
||||
exports.transforms = {
|
||||
flow: index_js_3.flow,
|
||||
align: align_js_1.default,
|
||||
indent: indent_js_1.default,
|
||||
crlf: crlf_js_1.default
|
||||
};
|
||||
exports.tokenizers = {
|
||||
tag: tag_js_1.default,
|
||||
type: type_js_1.default,
|
||||
name: name_js_1.default,
|
||||
description: description_js_1.default
|
||||
};
|
||||
exports.util = {
|
||||
rewireSpecs: util_js_1.rewireSpecs,
|
||||
rewireSource: util_js_1.rewireSource,
|
||||
seedBlock: util_js_1.seedBlock,
|
||||
seedTokens: util_js_1.seedTokens
|
||||
};
|
||||
//# sourceMappingURL=index.cjs.map
|
||||
+1
File diff suppressed because one or more lines are too long
+32
@@ -0,0 +1,32 @@
|
||||
import { Options as ParserOptions } from './parser/index.js';
|
||||
import descriptionTokenizer from './parser/tokenizers/description.js';
|
||||
import nameTokenizer from './parser/tokenizers/name.js';
|
||||
import tagTokenizer from './parser/tokenizers/tag.js';
|
||||
import typeTokenizer from './parser/tokenizers/type.js';
|
||||
import alignTransform from './transforms/align.js';
|
||||
import indentTransform from './transforms/indent.js';
|
||||
import crlfTransform from './transforms/crlf.js';
|
||||
import { flow as flowTransform } from './transforms/index.js';
|
||||
import { rewireSpecs, rewireSource, seedBlock, seedTokens } from './util.js';
|
||||
export * from './primitives.js';
|
||||
export declare function parse(source: string, options?: Partial<ParserOptions>): import("./primitives.js").Block[];
|
||||
export declare const stringify: import("./stringifier/index.js").Stringifier;
|
||||
export { default as inspect } from './stringifier/inspect.js';
|
||||
export declare const transforms: {
|
||||
flow: typeof flowTransform;
|
||||
align: typeof alignTransform;
|
||||
indent: typeof indentTransform;
|
||||
crlf: typeof crlfTransform;
|
||||
};
|
||||
export declare const tokenizers: {
|
||||
tag: typeof tagTokenizer;
|
||||
type: typeof typeTokenizer;
|
||||
name: typeof nameTokenizer;
|
||||
description: typeof descriptionTokenizer;
|
||||
};
|
||||
export declare const util: {
|
||||
rewireSpecs: typeof rewireSpecs;
|
||||
rewireSource: typeof rewireSource;
|
||||
seedBlock: typeof seedBlock;
|
||||
seedTokens: typeof seedTokens;
|
||||
};
|
||||
+36
@@ -0,0 +1,36 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = getParser;
|
||||
const reTag = /^@[^\s/]+(?=\s|$)/;
|
||||
/**
|
||||
* Creates configured `Parser`
|
||||
* @param {Partial<Options>} options
|
||||
*/
|
||||
function getParser({
|
||||
fence = '```'
|
||||
} = {}) {
|
||||
const fencer = getFencer(fence);
|
||||
const toggleFence = (source, isFenced) => fencer(source) ? !isFenced : isFenced;
|
||||
return function parseBlock(source) {
|
||||
// start with description section
|
||||
const sections = [[]];
|
||||
let isFenced = false;
|
||||
for (const line of source) {
|
||||
if (reTag.test(line.tokens.description) && !isFenced) {
|
||||
sections.push([line]);
|
||||
} else {
|
||||
sections[sections.length - 1].push(line);
|
||||
}
|
||||
isFenced = toggleFence(line.tokens.description, isFenced);
|
||||
}
|
||||
return sections;
|
||||
};
|
||||
}
|
||||
function getFencer(fence) {
|
||||
if (typeof fence === 'string') return source => source.split(fence).length % 2 === 0;
|
||||
return fence;
|
||||
}
|
||||
//# sourceMappingURL=block-parser.cjs.map
|
||||
+1
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"block-parser.cjs","names":["Object","defineProperty","exports","value","default","getParser","reTag","fence","fencer","getFencer","toggleFence","source","isFenced","parseBlock","sections","line","test","tokens","description","push","length","split"],"sources":["block-parser.js"],"sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = getParser;\nconst reTag = /^@[^\\s/]+(?=\\s|$)/;\n/**\n * Creates configured `Parser`\n * @param {Partial<Options>} options\n */\nfunction getParser({ fence = '```', } = {}) {\n const fencer = getFencer(fence);\n const toggleFence = (source, isFenced) => fencer(source) ? !isFenced : isFenced;\n return function parseBlock(source) {\n // start with description section\n const sections = [[]];\n let isFenced = false;\n for (const line of source) {\n if (reTag.test(line.tokens.description) && !isFenced) {\n sections.push([line]);\n }\n else {\n sections[sections.length - 1].push(line);\n }\n isFenced = toggleFence(line.tokens.description, isFenced);\n }\n return sections;\n };\n}\nfunction getFencer(fence) {\n if (typeof fence === 'string')\n return (source) => source.split(fence).length % 2 === 0;\n return 
fence;\n}\n"],"mappings":"AAAA,YAAY;;AACZA,MAAM,CAACC,cAAc,CAACC,OAAO,EAAE,YAAY,EAAE;EAAEC,KAAK,EAAE;AAAK,CAAC,CAAC;AAC7DD,OAAO,CAACE,OAAO,GAAGC,SAAS;AAC3B,MAAMC,KAAK,GAAG,mBAAmB;AACjC;AACA;AACA;AACA;AACA,SAASD,SAASA,CAAC;EAAEE,KAAK,GAAG;AAAO,CAAC,GAAG,CAAC,CAAC,EAAE;EACxC,MAAMC,MAAM,GAAGC,SAAS,CAACF,KAAK,CAAC;EAC/B,MAAMG,WAAW,GAAGA,CAACC,MAAM,EAAEC,QAAQ,KAAKJ,MAAM,CAACG,MAAM,CAAC,GAAG,CAACC,QAAQ,GAAGA,QAAQ;EAC/E,OAAO,SAASC,UAAUA,CAACF,MAAM,EAAE;IAC/B;IACA,MAAMG,QAAQ,GAAG,CAAC,EAAE,CAAC;IACrB,IAAIF,QAAQ,GAAG,KAAK;IACpB,KAAK,MAAMG,IAAI,IAAIJ,MAAM,EAAE;MACvB,IAAIL,KAAK,CAACU,IAAI,CAACD,IAAI,CAACE,MAAM,CAACC,WAAW,CAAC,IAAI,CAACN,QAAQ,EAAE;QAClDE,QAAQ,CAACK,IAAI,CAAC,CAACJ,IAAI,CAAC,CAAC;MACzB,CAAC,MACI;QACDD,QAAQ,CAACA,QAAQ,CAACM,MAAM,GAAG,CAAC,CAAC,CAACD,IAAI,CAACJ,IAAI,CAAC;MAC5C;MACAH,QAAQ,GAAGF,WAAW,CAACK,IAAI,CAACE,MAAM,CAACC,WAAW,EAAEN,QAAQ,CAAC;IAC7D;IACA,OAAOE,QAAQ;EACnB,CAAC;AACL;AACA,SAASL,SAASA,CAACF,KAAK,EAAE;EACtB,IAAI,OAAOA,KAAK,KAAK,QAAQ,EACzB,OAAQI,MAAM,IAAKA,MAAM,CAACU,KAAK,CAACd,KAAK,CAAC,CAACa,MAAM,GAAG,CAAC,KAAK,CAAC;EAC3D,OAAOb,KAAK;AAChB","ignoreList":[]}
|
||||
+24
@@ -0,0 +1,24 @@
|
||||
import { Line } from '../primitives.js';
/**
 * Groups source lines into sections representing tags.
 * The first section is the block description if present. The last section captures
 * lines from the last tag to the end of the block, including a dangling closing marker.
 * @param {Line[]} block source lines making a single comment block
 */
export type Parser = (block: Line[]) => Line[][];
/**
 * Predicate telling if a string contains an opening/closing escaping sequence.
 * @param {string} source raw source line
 */
export type Fencer = (source: string) => boolean;
/**
 * `Parser` configuration options
 */
export interface Options {
    /** escaping sequence (or custom predicate); tags inside fenced regions are not split out */
    fence: string | Fencer;
}
/**
 * Creates a configured `Parser`.
 * @param {Partial<Options>} options
 */
export default function getParser({ fence, }?: Partial<Options>): Parser;
|
||||
+52
@@ -0,0 +1,52 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = getParser;
|
||||
const primitives_js_1 = require("../primitives.cjs");
|
||||
const util_js_1 = require("../util.cjs");
|
||||
const block_parser_js_1 = require("./block-parser.cjs");
|
||||
const source_parser_js_1 = require("./source-parser.cjs");
|
||||
const spec_parser_js_1 = require("./spec-parser.cjs");
|
||||
const tag_js_1 = require("./tokenizers/tag.cjs");
|
||||
const type_js_1 = require("./tokenizers/type.cjs");
|
||||
const name_js_1 = require("./tokenizers/name.cjs");
|
||||
const description_js_1 = require("./tokenizers/description.cjs");
|
||||
/**
 * Creates the top-level comment parser pipeline: split source into lines,
 * accumulate comment blocks, section each block by tag, and tokenize every
 * tag section into a spec.
 * @param {Partial<Options>} options startLine, fence, spacing, markers, tokenizers
 */
function getParser({
  startLine = 0,
  fence = '```',
  spacing = 'compact',
  markers = primitives_js_1.Markers,
  tokenizers = [(0, tag_js_1.default)(), (0, type_js_1.default)(spacing), (0, name_js_1.default)(), (0, description_js_1.default)(spacing)]
} = {}) {
  // startLine must be a non-negative integer.
  if (startLine < 0 || startLine % 1 > 0) throw new Error('Invalid startLine');
  // Build every pipeline stage once, up front.
  const parseSource = (0, source_parser_js_1.default)({
    startLine,
    markers
  });
  const parseBlock = (0, block_parser_js_1.default)({
    fence
  });
  const parseSpec = (0, spec_parser_js_1.default)({
    tokenizers
  });
  const joinDescription = (0, description_js_1.getJoiner)(spacing);
  return function (source) {
    const result = [];
    for (const sourceLine of (0, util_js_1.splitLines)(source)) {
      // parseSource buffers lines and yields the full set once a block closes.
      const blockLines = parseSource(sourceLine);
      if (blockLines === null) continue;
      const sections = parseBlock(blockLines);
      // Section 0 is the block description; the rest are tag sections.
      const tags = sections.slice(1).map(parseSpec);
      const problems = tags.reduce((all, tag) => all.concat(tag.problems), []);
      result.push({
        description: joinDescription(sections[0], markers),
        tags,
        source: blockLines,
        problems
      });
    }
    return result;
  };
}
|
||||
//# sourceMappingURL=index.cjs.map
|
||||
+1
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"index.cjs","names":["Object","defineProperty","exports","value","default","getParser","primitives_js_1","require","util_js_1","block_parser_js_1","source_parser_js_1","spec_parser_js_1","tag_js_1","type_js_1","name_js_1","description_js_1","startLine","fence","spacing","markers","Markers","tokenizers","Error","parseSource","parseBlock","parseSpec","joinDescription","getJoiner","source","blocks","line","splitLines","lines","sections","specs","slice","map","push","description","tags","problems","reduce","acc","spec","concat"],"sources":["index.js"],"sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = getParser;\nconst primitives_js_1 = require(\"../primitives.js\");\nconst util_js_1 = require(\"../util.js\");\nconst block_parser_js_1 = require(\"./block-parser.js\");\nconst source_parser_js_1 = require(\"./source-parser.js\");\nconst spec_parser_js_1 = require(\"./spec-parser.js\");\nconst tag_js_1 = require(\"./tokenizers/tag.js\");\nconst type_js_1 = require(\"./tokenizers/type.js\");\nconst name_js_1 = require(\"./tokenizers/name.js\");\nconst description_js_1 = require(\"./tokenizers/description.js\");\nfunction getParser({ startLine = 0, fence = '```', spacing = 'compact', markers = primitives_js_1.Markers, tokenizers = [\n (0, tag_js_1.default)(),\n (0, type_js_1.default)(spacing),\n (0, name_js_1.default)(),\n (0, description_js_1.default)(spacing),\n], } = {}) {\n if (startLine < 0 || startLine % 1 > 0)\n throw new Error('Invalid startLine');\n const parseSource = (0, source_parser_js_1.default)({ startLine, markers });\n const parseBlock = (0, block_parser_js_1.default)({ fence });\n const parseSpec = (0, spec_parser_js_1.default)({ tokenizers });\n const joinDescription = (0, description_js_1.getJoiner)(spacing);\n return function (source) {\n const blocks = [];\n for (const line of (0, util_js_1.splitLines)(source)) {\n const lines = parseSource(line);\n if (lines === 
null)\n continue;\n const sections = parseBlock(lines);\n const specs = sections.slice(1).map(parseSpec);\n blocks.push({\n description: joinDescription(sections[0], markers),\n tags: specs,\n source: lines,\n problems: specs.reduce((acc, spec) => acc.concat(spec.problems), []),\n });\n }\n return blocks;\n };\n}\n"],"mappings":"AAAA,YAAY;;AACZA,MAAM,CAACC,cAAc,CAACC,OAAO,EAAE,YAAY,EAAE;EAAEC,KAAK,EAAE;AAAK,CAAC,CAAC;AAC7DD,OAAO,CAACE,OAAO,GAAGC,SAAS;AAC3B,MAAMC,eAAe,GAAGC,OAAO,qBAAoB;AACnD,MAAMC,SAAS,GAAGD,OAAO,eAAc;AACvC,MAAME,iBAAiB,GAAGF,OAAO,sBAAqB;AACtD,MAAMG,kBAAkB,GAAGH,OAAO,uBAAsB;AACxD,MAAMI,gBAAgB,GAAGJ,OAAO,qBAAoB;AACpD,MAAMK,QAAQ,GAAGL,OAAO,wBAAuB;AAC/C,MAAMM,SAAS,GAAGN,OAAO,yBAAwB;AACjD,MAAMO,SAAS,GAAGP,OAAO,yBAAwB;AACjD,MAAMQ,gBAAgB,GAAGR,OAAO,gCAA+B;AAC/D,SAASF,SAASA,CAAC;EAAEW,SAAS,GAAG,CAAC;EAAEC,KAAK,GAAG,KAAK;EAAEC,OAAO,GAAG,SAAS;EAAEC,OAAO,GAAGb,eAAe,CAACc,OAAO;EAAEC,UAAU,GAAG,CACpH,CAAC,CAAC,EAAET,QAAQ,CAACR,OAAO,EAAE,CAAC,EACvB,CAAC,CAAC,EAAES,SAAS,CAACT,OAAO,EAAEc,OAAO,CAAC,EAC/B,CAAC,CAAC,EAAEJ,SAAS,CAACV,OAAO,EAAE,CAAC,EACxB,CAAC,CAAC,EAAEW,gBAAgB,CAACX,OAAO,EAAEc,OAAO,CAAC;AACvC,CAAC,GAAG,CAAC,CAAC,EAAE;EACP,IAAIF,SAAS,GAAG,CAAC,IAAIA,SAAS,GAAG,CAAC,GAAG,CAAC,EAClC,MAAM,IAAIM,KAAK,CAAC,mBAAmB,CAAC;EACxC,MAAMC,WAAW,GAAG,CAAC,CAAC,EAAEb,kBAAkB,CAACN,OAAO,EAAE;IAAEY,SAAS;IAAEG;EAAQ,CAAC,CAAC;EAC3E,MAAMK,UAAU,GAAG,CAAC,CAAC,EAAEf,iBAAiB,CAACL,OAAO,EAAE;IAAEa;EAAM,CAAC,CAAC;EAC5D,MAAMQ,SAAS,GAAG,CAAC,CAAC,EAAEd,gBAAgB,CAACP,OAAO,EAAE;IAAEiB;EAAW,CAAC,CAAC;EAC/D,MAAMK,eAAe,GAAG,CAAC,CAAC,EAAEX,gBAAgB,CAACY,SAAS,EAAET,OAAO,CAAC;EAChE,OAAO,UAAUU,MAAM,EAAE;IACrB,MAAMC,MAAM,GAAG,EAAE;IACjB,KAAK,MAAMC,IAAI,IAAI,CAAC,CAAC,EAAEtB,SAAS,CAACuB,UAAU,EAAEH,MAAM,CAAC,EAAE;MAClD,MAAMI,KAAK,GAAGT,WAAW,CAACO,IAAI,CAAC;MAC/B,IAAIE,KAAK,KAAK,IAAI,EACd;MACJ,MAAMC,QAAQ,GAAGT,UAAU,CAACQ,KAAK,CAAC;MAClC,MAAME,KAAK,GAAGD,QAAQ,CAACE,KAAK,CAAC,CAAC,CAAC,CAACC,GAAG,CAACX,SAAS,CAAC;MAC9CI,MAAM,CAACQ,IAAI,CAAC;QACRC,WAAW,EAAEZ,eAAe,CAACO,QAAQ,CAAC,CAAC,CAAC,EAAEd,OAAO,CAA
C;QAClDoB,IAAI,EAAEL,KAAK;QACXN,MAAM,EAAEI,KAAK;QACbQ,QAAQ,EAAEN,KAAK,CAACO,MAAM,CAAC,CAACC,GAAG,EAAEC,IAAI,KAAKD,GAAG,CAACE,MAAM,CAACD,IAAI,CAACH,QAAQ,CAAC,EAAE,EAAE;MACvE,CAAC,CAAC;IACN;IACA,OAAOX,MAAM;EACjB,CAAC;AACL","ignoreList":[]}
|
||||
+11
@@ -0,0 +1,11 @@
|
||||
import { Block, BlockMarkers } from '../primitives.js';
import { Tokenizer } from './tokenizers/index.js';
/** Configuration of the top-level comment parser. */
export interface Options {
    /** number assigned to the first source line; must be a non-negative integer */
    startLine: number;
    /** escaping sequence suppressing tag detection inside fenced regions */
    fence: string;
    /** description whitespace handling: 'compact' joins trimmed lines, 'preserve' keeps them */
    spacing: 'compact' | 'preserve';
    /** comment block delimiters (start / nostart / delim / end) */
    markers: BlockMarkers;
    /** tokenizer chain applied to every tag section */
    tokenizers: Tokenizer[];
}
/** Parses raw source text into comment `Block`s. */
export type Parser = (source: string) => Block[];
export default function getParser({ startLine, fence, spacing, markers, tokenizers, }?: Partial<Options>): Parser;
|
||||
+56
@@ -0,0 +1,56 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = getParser;
|
||||
const primitives_js_1 = require("../primitives.cjs");
|
||||
const util_js_1 = require("../util.cjs");
|
||||
/**
 * Creates a stateful source-line parser: feed it the source one line at a
 * time, and it returns the accumulated `Line[]` when a comment block closes,
 * or `null` while no complete block is available.
 * NOTE(review): `block` and `num` persist across calls — one parser instance
 * tracks exactly one stream of lines.
 * @param {Partial<Options>} options `startLine` number and block `markers`
 */
function getParser({
  startLine = 0,
  markers = primitives_js_1.Markers
} = {}) {
  // Lines of the block being accumulated; null while outside a block.
  let block = null;
  // Running line counter, advanced on every call.
  let num = startLine;
  return function parseSource(source) {
    let rest = source;
    const tokens = (0, util_js_1.seedTokens)();
    // Peel a trailing CR into lineEnd, then leading whitespace into start.
    [tokens.lineEnd, rest] = (0, util_js_1.splitCR)(rest);
    [tokens.start, rest] = (0, util_js_1.splitSpace)(rest);
    // Opening marker found (and not the "nostart" exclusion): begin a new block.
    if (block === null && rest.startsWith(markers.start) && !rest.startsWith(markers.nostart)) {
      block = [];
      tokens.delimiter = rest.slice(0, markers.start.length);
      rest = rest.slice(markers.start.length);
      [tokens.postDelimiter, rest] = (0, util_js_1.splitSpace)(rest);
    }
    // Not inside a block: count the line and report nothing.
    if (block === null) {
      num++;
      return null;
    }
    // Detect the closing marker before the mid-line delimiter is stripped.
    const isClosed = rest.trimRight().endsWith(markers.end);
    // Continuation line: strip the mid-block delimiter (unless it is the closer).
    if (tokens.delimiter === '' && rest.startsWith(markers.delim) && !rest.startsWith(markers.end)) {
      tokens.delimiter = markers.delim;
      rest = rest.slice(markers.delim.length);
      [tokens.postDelimiter, rest] = (0, util_js_1.splitSpace)(rest);
    }
    // Closing line: move the end marker plus its trailing whitespace into tokens.end.
    if (isClosed) {
      const trimmed = rest.trimRight();
      tokens.end = rest.slice(trimmed.length - markers.end.length);
      rest = trimmed.slice(0, -markers.end.length);
    }
    tokens.description = rest;
    block.push({
      number: num,
      source,
      tokens
    });
    num++;
    // Block complete: hand out a copy of the collected lines and reset state.
    if (isClosed) {
      const result = block.slice();
      block = null;
      return result;
    }
    return null;
  };
}
|
||||
//# sourceMappingURL=source-parser.cjs.map
|
||||
+1
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"source-parser.cjs","names":["Object","defineProperty","exports","value","default","getParser","primitives_js_1","require","util_js_1","startLine","markers","Markers","block","num","parseSource","source","rest","tokens","seedTokens","lineEnd","splitCR","start","splitSpace","startsWith","nostart","delimiter","slice","length","postDelimiter","isClosed","trimRight","endsWith","end","delim","trimmed","description","push","number","result"],"sources":["source-parser.js"],"sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = getParser;\nconst primitives_js_1 = require(\"../primitives.js\");\nconst util_js_1 = require(\"../util.js\");\nfunction getParser({ startLine = 0, markers = primitives_js_1.Markers, } = {}) {\n let block = null;\n let num = startLine;\n return function parseSource(source) {\n let rest = source;\n const tokens = (0, util_js_1.seedTokens)();\n [tokens.lineEnd, rest] = (0, util_js_1.splitCR)(rest);\n [tokens.start, rest] = (0, util_js_1.splitSpace)(rest);\n if (block === null &&\n rest.startsWith(markers.start) &&\n !rest.startsWith(markers.nostart)) {\n block = [];\n tokens.delimiter = rest.slice(0, markers.start.length);\n rest = rest.slice(markers.start.length);\n [tokens.postDelimiter, rest] = (0, util_js_1.splitSpace)(rest);\n }\n if (block === null) {\n num++;\n return null;\n }\n const isClosed = rest.trimRight().endsWith(markers.end);\n if (tokens.delimiter === '' &&\n rest.startsWith(markers.delim) &&\n !rest.startsWith(markers.end)) {\n tokens.delimiter = markers.delim;\n rest = rest.slice(markers.delim.length);\n [tokens.postDelimiter, rest] = (0, util_js_1.splitSpace)(rest);\n }\n if (isClosed) {\n const trimmed = rest.trimRight();\n tokens.end = rest.slice(trimmed.length - markers.end.length);\n rest = trimmed.slice(0, -markers.end.length);\n }\n tokens.description = rest;\n block.push({ number: num, source, tokens });\n num++;\n if (isClosed) {\n const 
result = block.slice();\n block = null;\n return result;\n }\n return null;\n };\n}\n"],"mappings":"AAAA,YAAY;;AACZA,MAAM,CAACC,cAAc,CAACC,OAAO,EAAE,YAAY,EAAE;EAAEC,KAAK,EAAE;AAAK,CAAC,CAAC;AAC7DD,OAAO,CAACE,OAAO,GAAGC,SAAS;AAC3B,MAAMC,eAAe,GAAGC,OAAO,qBAAoB;AACnD,MAAMC,SAAS,GAAGD,OAAO,eAAc;AACvC,SAASF,SAASA,CAAC;EAAEI,SAAS,GAAG,CAAC;EAAEC,OAAO,GAAGJ,eAAe,CAACK;AAAS,CAAC,GAAG,CAAC,CAAC,EAAE;EAC3E,IAAIC,KAAK,GAAG,IAAI;EAChB,IAAIC,GAAG,GAAGJ,SAAS;EACnB,OAAO,SAASK,WAAWA,CAACC,MAAM,EAAE;IAChC,IAAIC,IAAI,GAAGD,MAAM;IACjB,MAAME,MAAM,GAAG,CAAC,CAAC,EAAET,SAAS,CAACU,UAAU,EAAE,CAAC;IAC1C,CAACD,MAAM,CAACE,OAAO,EAAEH,IAAI,CAAC,GAAG,CAAC,CAAC,EAAER,SAAS,CAACY,OAAO,EAAEJ,IAAI,CAAC;IACrD,CAACC,MAAM,CAACI,KAAK,EAAEL,IAAI,CAAC,GAAG,CAAC,CAAC,EAAER,SAAS,CAACc,UAAU,EAAEN,IAAI,CAAC;IACtD,IAAIJ,KAAK,KAAK,IAAI,IACdI,IAAI,CAACO,UAAU,CAACb,OAAO,CAACW,KAAK,CAAC,IAC9B,CAACL,IAAI,CAACO,UAAU,CAACb,OAAO,CAACc,OAAO,CAAC,EAAE;MACnCZ,KAAK,GAAG,EAAE;MACVK,MAAM,CAACQ,SAAS,GAAGT,IAAI,CAACU,KAAK,CAAC,CAAC,EAAEhB,OAAO,CAACW,KAAK,CAACM,MAAM,CAAC;MACtDX,IAAI,GAAGA,IAAI,CAACU,KAAK,CAAChB,OAAO,CAACW,KAAK,CAACM,MAAM,CAAC;MACvC,CAACV,MAAM,CAACW,aAAa,EAAEZ,IAAI,CAAC,GAAG,CAAC,CAAC,EAAER,SAAS,CAACc,UAAU,EAAEN,IAAI,CAAC;IAClE;IACA,IAAIJ,KAAK,KAAK,IAAI,EAAE;MAChBC,GAAG,EAAE;MACL,OAAO,IAAI;IACf;IACA,MAAMgB,QAAQ,GAAGb,IAAI,CAACc,SAAS,CAAC,CAAC,CAACC,QAAQ,CAACrB,OAAO,CAACsB,GAAG,CAAC;IACvD,IAAIf,MAAM,CAACQ,SAAS,KAAK,EAAE,IACvBT,IAAI,CAACO,UAAU,CAACb,OAAO,CAACuB,KAAK,CAAC,IAC9B,CAACjB,IAAI,CAACO,UAAU,CAACb,OAAO,CAACsB,GAAG,CAAC,EAAE;MAC/Bf,MAAM,CAACQ,SAAS,GAAGf,OAAO,CAACuB,KAAK;MAChCjB,IAAI,GAAGA,IAAI,CAACU,KAAK,CAAChB,OAAO,CAACuB,KAAK,CAACN,MAAM,CAAC;MACvC,CAACV,MAAM,CAACW,aAAa,EAAEZ,IAAI,CAAC,GAAG,CAAC,CAAC,EAAER,SAAS,CAACc,UAAU,EAAEN,IAAI,CAAC;IAClE;IACA,IAAIa,QAAQ,EAAE;MACV,MAAMK,OAAO,GAAGlB,IAAI,CAACc,SAAS,CAAC,CAAC;MAChCb,MAAM,CAACe,GAAG,GAAGhB,IAAI,CAACU,KAAK,CAACQ,OAAO,CAACP,MAAM,GAAGjB,OAAO,CAACsB,GAAG,CAACL,MAAM,CAAC;MAC5DX,IAAI,GAAGkB,OAAO,CAACR,KAAK,CAAC,CAAC,EAAE,CAAChB,OAAO,CAACsB,GAAG,CAACL,MAAM,CAAC
;IAChD;IACAV,MAAM,CAACkB,WAAW,GAAGnB,IAAI;IACzBJ,KAAK,CAACwB,IAAI,CAAC;MAAEC,MAAM,EAAExB,GAAG;MAAEE,MAAM;MAAEE;IAAO,CAAC,CAAC;IAC3CJ,GAAG,EAAE;IACL,IAAIgB,QAAQ,EAAE;MACV,MAAMS,MAAM,GAAG1B,KAAK,CAACc,KAAK,CAAC,CAAC;MAC5Bd,KAAK,GAAG,IAAI;MACZ,OAAO0B,MAAM;IACjB;IACA,OAAO,IAAI;EACf,CAAC;AACL","ignoreList":[]}
|
||||
+7
@@ -0,0 +1,7 @@
|
||||
import { Line, BlockMarkers } from '../primitives.js';
/** Configuration of the line-accumulating source parser. */
export interface Options {
    /** number assigned to the first fed line */
    startLine: number;
    /** comment block delimiters used to detect block start/end */
    markers: BlockMarkers;
}
/**
 * Consumes one raw source line per call; returns the accumulated `Line[]`
 * once a comment block closes, and `null` otherwise.
 */
export type Parser = (source: string) => Line[] | null;
export default function getParser({ startLine, markers, }?: Partial<Options>): Parser;
|
||||
+23
@@ -0,0 +1,23 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = getParser;
|
||||
const util_js_1 = require("../util.cjs");
|
||||
function getParser({
|
||||
tokenizers
|
||||
}) {
|
||||
return function parseSpec(source) {
|
||||
var _a;
|
||||
let spec = (0, util_js_1.seedSpec)({
|
||||
source
|
||||
});
|
||||
for (const tokenize of tokenizers) {
|
||||
spec = tokenize(spec);
|
||||
if ((_a = spec.problems[spec.problems.length - 1]) === null || _a === void 0 ? void 0 : _a.critical) break;
|
||||
}
|
||||
return spec;
|
||||
};
|
||||
}
|
||||
//# sourceMappingURL=spec-parser.cjs.map
|
||||
+1
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"spec-parser.cjs","names":["Object","defineProperty","exports","value","default","getParser","util_js_1","require","tokenizers","parseSpec","source","_a","spec","seedSpec","tokenize","problems","length","critical"],"sources":["spec-parser.js"],"sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = getParser;\nconst util_js_1 = require(\"../util.js\");\nfunction getParser({ tokenizers }) {\n return function parseSpec(source) {\n var _a;\n let spec = (0, util_js_1.seedSpec)({ source });\n for (const tokenize of tokenizers) {\n spec = tokenize(spec);\n if ((_a = spec.problems[spec.problems.length - 1]) === null || _a === void 0 ? void 0 : _a.critical)\n break;\n }\n return spec;\n };\n}\n"],"mappings":"AAAA,YAAY;;AACZA,MAAM,CAACC,cAAc,CAACC,OAAO,EAAE,YAAY,EAAE;EAAEC,KAAK,EAAE;AAAK,CAAC,CAAC;AAC7DD,OAAO,CAACE,OAAO,GAAGC,SAAS;AAC3B,MAAMC,SAAS,GAAGC,OAAO,eAAc;AACvC,SAASF,SAASA,CAAC;EAAEG;AAAW,CAAC,EAAE;EAC/B,OAAO,SAASC,SAASA,CAACC,MAAM,EAAE;IAC9B,IAAIC,EAAE;IACN,IAAIC,IAAI,GAAG,CAAC,CAAC,EAAEN,SAAS,CAACO,QAAQ,EAAE;MAAEH;IAAO,CAAC,CAAC;IAC9C,KAAK,MAAMI,QAAQ,IAAIN,UAAU,EAAE;MAC/BI,IAAI,GAAGE,QAAQ,CAACF,IAAI,CAAC;MACrB,IAAI,CAACD,EAAE,GAAGC,IAAI,CAACG,QAAQ,CAACH,IAAI,CAACG,QAAQ,CAACC,MAAM,GAAG,CAAC,CAAC,MAAM,IAAI,IAAIL,EAAE,KAAK,KAAK,CAAC,GAAG,KAAK,CAAC,GAAGA,EAAE,CAACM,QAAQ,EAC/F;IACR;IACA,OAAOL,IAAI;EACf,CAAC;AACL","ignoreList":[]}
|
||||
+7
@@ -0,0 +1,7 @@
|
||||
import { Line, Spec } from '../primitives.js';
import { Tokenizer } from './tokenizers/index.js';
/** Turns one tag section (a group of source lines) into a tokenized `Spec`. */
export type Parser = (source: Line[]) => Spec;
export interface Options {
    /** tokenizers applied in order; the chain stops on a critical problem */
    tokenizers: Tokenizer[];
}
export default function getParser({ tokenizers }: Options): Parser;
|
||||
+51
@@ -0,0 +1,51 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = descriptionTokenizer;
|
||||
exports.getJoiner = getJoiner;
|
||||
const primitives_js_1 = require("../../primitives.cjs");
|
||||
/**
|
||||
* Makes no changes to `spec.lines[].tokens` but joins them into `spec.description`
|
||||
* following given spacing srtategy
|
||||
* @param {Spacing} spacing tells how to handle the whitespace
|
||||
* @param {BlockMarkers} markers tells how to handle comment block delimitation
|
||||
*/
|
||||
function descriptionTokenizer(spacing = 'compact', markers = primitives_js_1.Markers) {
|
||||
const join = getJoiner(spacing);
|
||||
return spec => {
|
||||
spec.description = join(spec.source, markers);
|
||||
return spec;
|
||||
};
|
||||
}
|
||||
function getJoiner(spacing) {
|
||||
if (spacing === 'compact') return compactJoiner;
|
||||
if (spacing === 'preserve') return preserveJoiner;
|
||||
return spacing;
|
||||
}
|
||||
function compactJoiner(lines, markers = primitives_js_1.Markers) {
|
||||
return lines.map(({
|
||||
tokens: {
|
||||
description
|
||||
}
|
||||
}) => description.trim()).filter(description => description !== '').join(' ');
|
||||
}
|
||||
const lineNo = (num, {
|
||||
tokens
|
||||
}, i) => tokens.type === '' ? num : i;
|
||||
const getDescription = ({
|
||||
tokens
|
||||
}) => (tokens.delimiter === '' ? tokens.start : tokens.postDelimiter.slice(1)) + tokens.description;
|
||||
function preserveJoiner(lines, markers = primitives_js_1.Markers) {
|
||||
if (lines.length === 0) return '';
|
||||
// skip the opening line with no description
|
||||
if (lines[0].tokens.description === '' && lines[0].tokens.delimiter === markers.start) lines = lines.slice(1);
|
||||
// skip the closing line with no description
|
||||
const lastLine = lines[lines.length - 1];
|
||||
if (lastLine !== undefined && lastLine.tokens.description === '' && lastLine.tokens.end.endsWith(markers.end)) lines = lines.slice(0, -1);
|
||||
// description starts at the last line of type definition
|
||||
lines = lines.slice(lines.reduce(lineNo, 0));
|
||||
return lines.map(getDescription).join('\n');
|
||||
}
|
||||
//# sourceMappingURL=description.cjs.map
|
||||
+1
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"description.cjs","names":["Object","defineProperty","exports","value","default","descriptionTokenizer","getJoiner","primitives_js_1","require","spacing","markers","Markers","join","spec","description","source","compactJoiner","preserveJoiner","lines","map","tokens","trim","filter","lineNo","num","i","type","getDescription","delimiter","start","postDelimiter","slice","length","lastLine","undefined","end","endsWith","reduce"],"sources":["description.js"],"sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = descriptionTokenizer;\nexports.getJoiner = getJoiner;\nconst primitives_js_1 = require(\"../../primitives.js\");\n/**\n * Makes no changes to `spec.lines[].tokens` but joins them into `spec.description`\n * following given spacing srtategy\n * @param {Spacing} spacing tells how to handle the whitespace\n * @param {BlockMarkers} markers tells how to handle comment block delimitation\n */\nfunction descriptionTokenizer(spacing = 'compact', markers = primitives_js_1.Markers) {\n const join = getJoiner(spacing);\n return (spec) => {\n spec.description = join(spec.source, markers);\n return spec;\n };\n}\nfunction getJoiner(spacing) {\n if (spacing === 'compact')\n return compactJoiner;\n if (spacing === 'preserve')\n return preserveJoiner;\n return spacing;\n}\nfunction compactJoiner(lines, markers = primitives_js_1.Markers) {\n return lines\n .map(({ tokens: { description } }) => description.trim())\n .filter((description) => description !== '')\n .join(' ');\n}\nconst lineNo = (num, { tokens }, i) => tokens.type === '' ? num : i;\nconst getDescription = ({ tokens }) => (tokens.delimiter === '' ? 
tokens.start : tokens.postDelimiter.slice(1)) +\n tokens.description;\nfunction preserveJoiner(lines, markers = primitives_js_1.Markers) {\n if (lines.length === 0)\n return '';\n // skip the opening line with no description\n if (lines[0].tokens.description === '' &&\n lines[0].tokens.delimiter === markers.start)\n lines = lines.slice(1);\n // skip the closing line with no description\n const lastLine = lines[lines.length - 1];\n if (lastLine !== undefined &&\n lastLine.tokens.description === '' &&\n lastLine.tokens.end.endsWith(markers.end))\n lines = lines.slice(0, -1);\n // description starts at the last line of type definition\n lines = lines.slice(lines.reduce(lineNo, 0));\n return lines.map(getDescription).join('\\n');\n}\n"],"mappings":"AAAA,YAAY;;AACZA,MAAM,CAACC,cAAc,CAACC,OAAO,EAAE,YAAY,EAAE;EAAEC,KAAK,EAAE;AAAK,CAAC,CAAC;AAC7DD,OAAO,CAACE,OAAO,GAAGC,oBAAoB;AACtCH,OAAO,CAACI,SAAS,GAAGA,SAAS;AAC7B,MAAMC,eAAe,GAAGC,OAAO,wBAAuB;AACtD;AACA;AACA;AACA;AACA;AACA;AACA,SAASH,oBAAoBA,CAACI,OAAO,GAAG,SAAS,EAAEC,OAAO,GAAGH,eAAe,CAACI,OAAO,EAAE;EAClF,MAAMC,IAAI,GAAGN,SAAS,CAACG,OAAO,CAAC;EAC/B,OAAQI,IAAI,IAAK;IACbA,IAAI,CAACC,WAAW,GAAGF,IAAI,CAACC,IAAI,CAACE,MAAM,EAAEL,OAAO,CAAC;IAC7C,OAAOG,IAAI;EACf,CAAC;AACL;AACA,SAASP,SAASA,CAACG,OAAO,EAAE;EACxB,IAAIA,OAAO,KAAK,SAAS,EACrB,OAAOO,aAAa;EACxB,IAAIP,OAAO,KAAK,UAAU,EACtB,OAAOQ,cAAc;EACzB,OAAOR,OAAO;AAClB;AACA,SAASO,aAAaA,CAACE,KAAK,EAAER,OAAO,GAAGH,eAAe,CAACI,OAAO,EAAE;EAC7D,OAAOO,KAAK,CACPC,GAAG,CAAC,CAAC;IAAEC,MAAM,EAAE;MAAEN;IAAY;EAAE,CAAC,KAAKA,WAAW,CAACO,IAAI,CAAC,CAAC,CAAC,CACxDC,MAAM,CAAER,WAAW,IAAKA,WAAW,KAAK,EAAE,CAAC,CAC3CF,IAAI,CAAC,GAAG,CAAC;AAClB;AACA,MAAMW,MAAM,GAAGA,CAACC,GAAG,EAAE;EAAEJ;AAAO,CAAC,EAAEK,CAAC,KAAKL,MAAM,CAACM,IAAI,KAAK,EAAE,GAAGF,GAAG,GAAGC,CAAC;AACnE,MAAME,cAAc,GAAGA,CAAC;EAAEP;AAAO,CAAC,KAAK,CAACA,MAAM,CAACQ,SAAS,KAAK,EAAE,GAAGR,MAAM,CAACS,KAAK,GAAGT,MAAM,CAACU,aAAa,CAACC,KAAK,CAAC,CAAC,CAAC,IAC1GX,MAAM,CAACN,WAAW;AACtB,SAASG,cAAcA,CAACC,KAAK,EAAER,OAAO,GAAGH,eAAe,CAACI,OAAO,EAAE;EAC9D,IA
AIO,KAAK,CAACc,MAAM,KAAK,CAAC,EAClB,OAAO,EAAE;EACb;EACA,IAAId,KAAK,CAAC,CAAC,CAAC,CAACE,MAAM,CAACN,WAAW,KAAK,EAAE,IAClCI,KAAK,CAAC,CAAC,CAAC,CAACE,MAAM,CAACQ,SAAS,KAAKlB,OAAO,CAACmB,KAAK,EAC3CX,KAAK,GAAGA,KAAK,CAACa,KAAK,CAAC,CAAC,CAAC;EAC1B;EACA,MAAME,QAAQ,GAAGf,KAAK,CAACA,KAAK,CAACc,MAAM,GAAG,CAAC,CAAC;EACxC,IAAIC,QAAQ,KAAKC,SAAS,IACtBD,QAAQ,CAACb,MAAM,CAACN,WAAW,KAAK,EAAE,IAClCmB,QAAQ,CAACb,MAAM,CAACe,GAAG,CAACC,QAAQ,CAAC1B,OAAO,CAACyB,GAAG,CAAC,EACzCjB,KAAK,GAAGA,KAAK,CAACa,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;EAC9B;EACAb,KAAK,GAAGA,KAAK,CAACa,KAAK,CAACb,KAAK,CAACmB,MAAM,CAACd,MAAM,EAAE,CAAC,CAAC,CAAC;EAC5C,OAAOL,KAAK,CAACC,GAAG,CAACQ,cAAc,CAAC,CAACf,IAAI,CAAC,IAAI,CAAC;AAC/C","ignoreList":[]}
|
||||
+20
@@ -0,0 +1,20 @@
|
||||
import { Line, BlockMarkers, Markers } from '../../primitives.js';
import { Tokenizer } from './index.js';
/**
 * Walks over provided lines joining the description token into a single string.
 */
export type Joiner = (lines: Line[], markers?: BlockMarkers) => string;
/**
 * Shortcut for standard Joiners:
 * compact - strips surrounding whitespace and joins lines with a single space
 * preserve - preserves original whitespace and line breaks as is
 */
export type Spacing = 'compact' | 'preserve' | Joiner;
/**
 * Makes no changes to `spec.lines[].tokens` but joins them into `spec.description`
 * following the given spacing strategy.
 * @param {Spacing} spacing tells how to handle the whitespace
 * @param {BlockMarkers} markers tells how to handle comment block delimitation
 */
export default function descriptionTokenizer(spacing?: Spacing, markers?: typeof Markers): Tokenizer;
export declare function getJoiner(spacing: Spacing): Joiner;
|
||||
+6
@@ -0,0 +1,6 @@
|
||||
"use strict";

// This module only declares the `Tokenizer` type (see index.d.ts); the compiled
// output carries no runtime code beyond the ES-module interop flag.
Object.defineProperty(exports, "__esModule", {
  value: true
});
//# sourceMappingURL=index.cjs.map
|
||||
+1
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"index.cjs","names":["Object","defineProperty","exports","value"],"sources":["index.js"],"sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n"],"mappings":"AAAA,YAAY;;AACZA,MAAM,CAACC,cAAc,CAACC,OAAO,EAAE,YAAY,EAAE;EAAEC,KAAK,EAAE;AAAK,CAAC,CAAC","ignoreList":[]}
|
||||
+7
@@ -0,0 +1,7 @@
|
||||
import { Spec } from '../../primitives.js';
/**
 * Splits `spec.lines[].tokens.description` into other tokens,
 * and populates the spec.{tag, name, type, description}. Invoked in a chain
 * with other tokenizers; operations listed above can be moved to separate tokenizers.
 */
export type Tokenizer = (spec: Spec) => Spec;
|
||||
+143
@@ -0,0 +1,143 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = nameTokenizer;
|
||||
const util_js_1 = require("../../util.cjs");
|
||||
const isQuoted = s => s && s.startsWith('"') && s.endsWith('"');
|
||||
/**
|
||||
* Splits remaining `spec.lines[].tokens.description` into `name` and `descriptions` tokens,
|
||||
* and populates the `spec.name`
|
||||
*/
|
||||
function nameTokenizer() {
|
||||
const typeEnd = (num, {
|
||||
tokens
|
||||
}, i) => tokens.type === '' ? num : i;
|
||||
return spec => {
|
||||
// look for the name starting in the line where {type} ends
|
||||
let finalTypeLine = spec.source.reduce(typeEnd, 0);
|
||||
let tokens;
|
||||
if (spec.type) {
|
||||
do {
|
||||
({
|
||||
tokens
|
||||
} = spec.source[finalTypeLine]);
|
||||
if (tokens.description.trim()) {
|
||||
break;
|
||||
}
|
||||
finalTypeLine++;
|
||||
} while (spec.source[finalTypeLine]);
|
||||
} else {
|
||||
({
|
||||
tokens
|
||||
} = spec.source[finalTypeLine]);
|
||||
}
|
||||
const source = tokens.description.trimStart();
|
||||
const quotedGroups = source.split('"');
|
||||
// if it starts with quoted group, assume it is a literal
|
||||
if (quotedGroups.length > 1 && quotedGroups[0] === '' && quotedGroups.length % 2 === 1) {
|
||||
spec.name = quotedGroups[1];
|
||||
tokens.name = `"${quotedGroups[1]}"`;
|
||||
[tokens.postName, tokens.description] = (0, util_js_1.splitSpace)(source.slice(tokens.name.length));
|
||||
return spec;
|
||||
}
|
||||
let brackets = 0;
|
||||
let name = '';
|
||||
let optional = false;
|
||||
let defaultValue;
|
||||
// assume name is non-space string or anything wrapped into brackets
|
||||
for (const ch of source) {
|
||||
if (brackets === 0 && (0, util_js_1.isSpace)(ch)) break;
|
||||
if (ch === '[') brackets++;
|
||||
if (ch === ']') brackets--;
|
||||
name += ch;
|
||||
}
|
||||
if (brackets !== 0) {
|
||||
spec.problems.push({
|
||||
code: 'spec:name:unpaired-brackets',
|
||||
message: 'unpaired brackets',
|
||||
line: spec.source[0].number,
|
||||
critical: true
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
const nameToken = name;
|
||||
if (name[0] === '[' && name[name.length - 1] === ']') {
|
||||
optional = true;
|
||||
name = name.slice(1, -1);
|
||||
const parts = name.split('=');
|
||||
name = parts[0].trim();
|
||||
if (parts[1] !== undefined) defaultValue = parts.slice(1).join('=').trim();
|
||||
if (name === '') {
|
||||
spec.problems.push({
|
||||
code: 'spec:name:empty-name',
|
||||
message: 'empty name',
|
||||
line: spec.source[0].number,
|
||||
critical: true
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
if (defaultValue === '') {
|
||||
spec.problems.push({
|
||||
code: 'spec:name:empty-default',
|
||||
message: 'empty default value',
|
||||
line: spec.source[0].number,
|
||||
critical: true
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
// has "=" and is not a string, except for "=>"
|
||||
if (!isQuoted(defaultValue) && /=(?!>)/.test(defaultValue)) {
|
||||
spec.problems.push({
|
||||
code: 'spec:name:invalid-default',
|
||||
message: 'invalid default value syntax',
|
||||
line: spec.source[0].number,
|
||||
critical: true
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
}
|
||||
if (!optional) {
|
||||
const eqIndex = name.search(/=(?!>)/);
|
||||
if (eqIndex !== -1) {
|
||||
defaultValue = name.slice(eqIndex + 1).trim();
|
||||
name = name.slice(0, eqIndex).trim();
|
||||
if (name === '') {
|
||||
spec.problems.push({
|
||||
code: 'spec:name:empty-name',
|
||||
message: 'empty name',
|
||||
line: spec.source[0].number,
|
||||
critical: true
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
if (defaultValue === '') {
|
||||
spec.problems.push({
|
||||
code: 'spec:name:empty-default',
|
||||
message: 'empty default value',
|
||||
line: spec.source[0].number,
|
||||
critical: true
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
if (!isQuoted(defaultValue) && /=(?!>)/.test(defaultValue)) {
|
||||
spec.problems.push({
|
||||
code: 'spec:name:invalid-default',
|
||||
message: 'invalid default value syntax',
|
||||
line: spec.source[0].number,
|
||||
critical: true
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
}
|
||||
}
|
||||
spec.optional = optional;
|
||||
spec.name = name;
|
||||
tokens.name = nameToken;
|
||||
if (defaultValue !== undefined) spec.default = defaultValue;
|
||||
[tokens.postName, tokens.description] = (0, util_js_1.splitSpace)(source.slice(tokens.name.length));
|
||||
return spec;
|
||||
};
|
||||
}
|
||||
//# sourceMappingURL=name.cjs.map
|
||||
+1
File diff suppressed because one or more lines are too long
+6
@@ -0,0 +1,6 @@
|
||||
import { Tokenizer } from './index.js';
|
||||
/**
|
||||
* Splits remaining `spec.lines[].tokens.description` into `name` and `descriptions` tokens,
|
||||
* and populates the `spec.name`
|
||||
*/
|
||||
export default function nameTokenizer(): Tokenizer;
|
||||
+36
@@ -0,0 +1,36 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = tagTokenizer;
|
||||
/**
 * Splits the `@prefix` from the remaining `Spec.lines[].token.description`
 * into the `tag` token, and populates `spec.tag`.
 */
function tagTokenizer() {
    return spec => {
        const firstLine = spec.source[0];
        const tokens = firstLine.tokens;
        const parsed = tokens.description.match(/\s*(@(\S+))(\s*)/);
        // No "@word" found anywhere in the description — report and bail.
        if (parsed === null) {
            spec.problems.push({
                code: 'spec:tag:prefix',
                message: 'tag should start with "@" symbol',
                line: firstLine.number,
                critical: true
            });
            return spec;
        }
        // Slash-containing matches (e.g. "@npm/package") are not tags.
        if (parsed[1].includes('/')) {
            return spec;
        }
        const [consumed, rawTag, tagName, trailing] = parsed;
        tokens.tag = rawTag;
        tokens.postTag = trailing;
        tokens.description = tokens.description.slice(consumed.length);
        spec.tag = tagName;
        return spec;
    };
}
|
||||
//# sourceMappingURL=tag.cjs.map
|
||||
+1
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"tag.cjs","names":["Object","defineProperty","exports","value","default","tagTokenizer","spec","tokens","source","match","description","problems","push","code","message","line","number","critical","includes","tag","postTag","slice","length"],"sources":["tag.js"],"sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = tagTokenizer;\n/**\n * Splits the `@prefix` from remaining `Spec.lines[].token.description` into the `tag` token,\n * and populates `spec.tag`\n */\nfunction tagTokenizer() {\n return (spec) => {\n const { tokens } = spec.source[0];\n const match = tokens.description.match(/\\s*(@(\\S+))(\\s*)/);\n if (match === null) {\n spec.problems.push({\n code: 'spec:tag:prefix',\n message: 'tag should start with \"@\" symbol',\n line: spec.source[0].number,\n critical: true,\n });\n return spec;\n }\n if (match[1].includes('/')) {\n return spec;\n }\n tokens.tag = match[1];\n tokens.postTag = match[3];\n tokens.description = tokens.description.slice(match[0].length);\n spec.tag = match[2];\n return spec;\n 
};\n}\n"],"mappings":"AAAA,YAAY;;AACZA,MAAM,CAACC,cAAc,CAACC,OAAO,EAAE,YAAY,EAAE;EAAEC,KAAK,EAAE;AAAK,CAAC,CAAC;AAC7DD,OAAO,CAACE,OAAO,GAAGC,YAAY;AAC9B;AACA;AACA;AACA;AACA,SAASA,YAAYA,CAAA,EAAG;EACpB,OAAQC,IAAI,IAAK;IACb,MAAM;MAAEC;IAAO,CAAC,GAAGD,IAAI,CAACE,MAAM,CAAC,CAAC,CAAC;IACjC,MAAMC,KAAK,GAAGF,MAAM,CAACG,WAAW,CAACD,KAAK,CAAC,kBAAkB,CAAC;IAC1D,IAAIA,KAAK,KAAK,IAAI,EAAE;MAChBH,IAAI,CAACK,QAAQ,CAACC,IAAI,CAAC;QACfC,IAAI,EAAE,iBAAiB;QACvBC,OAAO,EAAE,kCAAkC;QAC3CC,IAAI,EAAET,IAAI,CAACE,MAAM,CAAC,CAAC,CAAC,CAACQ,MAAM;QAC3BC,QAAQ,EAAE;MACd,CAAC,CAAC;MACF,OAAOX,IAAI;IACf;IACA,IAAIG,KAAK,CAAC,CAAC,CAAC,CAACS,QAAQ,CAAC,GAAG,CAAC,EAAE;MACxB,OAAOZ,IAAI;IACf;IACAC,MAAM,CAACY,GAAG,GAAGV,KAAK,CAAC,CAAC,CAAC;IACrBF,MAAM,CAACa,OAAO,GAAGX,KAAK,CAAC,CAAC,CAAC;IACzBF,MAAM,CAACG,WAAW,GAAGH,MAAM,CAACG,WAAW,CAACW,KAAK,CAACZ,KAAK,CAAC,CAAC,CAAC,CAACa,MAAM,CAAC;IAC9DhB,IAAI,CAACa,GAAG,GAAGV,KAAK,CAAC,CAAC,CAAC;IACnB,OAAOH,IAAI;EACf,CAAC;AACL","ignoreList":[]}
|
||||
+6
@@ -0,0 +1,6 @@
|
||||
import { Tokenizer } from './index.js';
/**
 * Splits the `@prefix` from the remaining `Spec.lines[].token.description`
 * into the `tag` token, and populates `spec.tag`.
 */
export default function tagTokenizer(): Tokenizer;
|
||||
+75
@@ -0,0 +1,75 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = typeTokenizer;
|
||||
const util_js_1 = require("../../util.cjs");
|
||||
/**
 * Splits the remaining `Spec.lines[].tokens.description` into `type` and
 * `description` tokens and populates `Spec.type`.
 *
 * @param {Spacing} spacing tells how to deal with a whitespace
 * for type values going over multiple lines
 */
function typeTokenizer(spacing = 'compact') {
    const join = getJoiner(spacing);
    return spec => {
        let curlies = 0; // {..} nesting depth while scanning the type
        let lines = []; // [tokens, collected type chunk] per contributing line
        let descriptionBegun = false;
        let firstTypeIteration = true;
        for (const {
            tokens
        } of spec.source.values()) {
            let type = '';
            // Skip leading blank lines until the first non-empty description.
            if (!descriptionBegun && tokens.description.trim()) {
                descriptionBegun = true;
            } else if (!descriptionBegun) {
                continue;
            }
            // No leading '{' on the first content line means there is no type.
            if (firstTypeIteration && tokens.description[0] !== '{') return spec;
            firstTypeIteration = false;
            // Consume characters until the opening brace is balanced again.
            for (const ch of tokens.description) {
                if (ch === '{') curlies++;
                if (ch === '}') curlies--;
                type += ch;
                if (curlies === 0) break;
            }
            lines.push([tokens, type]);
            if (curlies === 0) break;
        }
        if (!descriptionBegun) {
            return spec;
        }
        // Braces never balanced across all lines: report and bail.
        if (curlies !== 0) {
            spec.problems.push({
                code: 'spec:type:unpaired-curlies',
                message: 'unpaired curlies',
                line: spec.source[0].number,
                critical: true
            });
            return spec;
        }
        const parts = [];
        // Continuation lines keep only the first line's postDelimiter width;
        // any surplus indent is folded into the type text itself.
        const offset = lines[0][0].postDelimiter.length;
        for (const [i, [tokens, type]] of lines.entries()) {
            tokens.type = type;
            if (i > 0) {
                tokens.type = tokens.postDelimiter.slice(offset) + type;
                tokens.postDelimiter = tokens.postDelimiter.slice(0, offset);
            }
            [tokens.postType, tokens.description] = (0, util_js_1.splitSpace)(tokens.description.slice(type.length));
            parts.push(tokens.type);
        }
        // Strip the surrounding '{' and '}' before joining the parts.
        parts[0] = parts[0].slice(1);
        parts[parts.length - 1] = parts[parts.length - 1].slice(0, -1);
        spec.type = join(parts);
        return spec;
    };
}
|
||||
/** Trims surrounding whitespace from a single collected type part. */
const trim = x => x.trim();

/**
 * Resolves a Spacing option into a Joiner:
 * - 'preserve': keep parts as-is, joined by newlines
 * - 'compact': trim each part and concatenate without separators
 * - otherwise the caller supplied a custom Joiner, returned unchanged
 */
function getJoiner(spacing) {
    if (spacing === 'preserve') return parts => parts.join('\n');
    if (spacing === 'compact') return parts => parts.map(trim).join('');
    return spacing;
}
|
||||
//# sourceMappingURL=type.cjs.map
|
||||
+1
File diff suppressed because one or more lines are too long
+27
@@ -0,0 +1,27 @@
|
||||
import { Tokenizer } from './index.js';
/**
 * Joiner is a function taking collected type token string parts,
 * and joining them together. In most of the cases this will be
 * a single piece like {type-name}, but a type may go over multiple lines
 * ```
 * @tag {function(
 *   number,
 *   string
 * )}
 * ```
 */
export type Joiner = (parts: string[]) => string;
/**
 * Shortcut for standard Joiners
 * compact - trim each part and concatenate without separators
 * preserve - concat as is, joined with newlines
 */
export type Spacing = 'compact' | 'preserve' | Joiner;
/**
 * Splits the remaining `Spec.lines[].tokens.description` into `type` and
 * `description` tokens and populates `Spec.type`.
 *
 * @param {Spacing} spacing tells how to deal with a whitespace
 * for type values going over multiple lines
 */
export default function typeTokenizer(spacing?: Spacing): Tokenizer;
|
||||
+15
@@ -0,0 +1,15 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.Markers = void 0;
|
||||
/** @deprecated */
// tsc-emitted string enum: builds the Markers object in place and
// assigns it to exports.Markers as part of the enum initialization.
var Markers;
(function (Markers) {
    Markers["start"] = "/**";
    Markers["nostart"] = "/***";
    Markers["delim"] = "*";
    Markers["end"] = "*/";
})(Markers || (exports.Markers = Markers = {}));
|
||||
//# sourceMappingURL=primitives.cjs.map
|
||||
+1
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"primitives.cjs","names":["Object","defineProperty","exports","value","Markers"],"sources":["primitives.js"],"sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Markers = void 0;\n/** @deprecated */\nvar Markers;\n(function (Markers) {\n Markers[\"start\"] = \"/**\";\n Markers[\"nostart\"] = \"/***\";\n Markers[\"delim\"] = \"*\";\n Markers[\"end\"] = \"*/\";\n})(Markers || (exports.Markers = Markers = {}));\n"],"mappings":"AAAA,YAAY;;AACZA,MAAM,CAACC,cAAc,CAACC,OAAO,EAAE,YAAY,EAAE;EAAEC,KAAK,EAAE;AAAK,CAAC,CAAC;AAC7DD,OAAO,CAACE,OAAO,GAAG,KAAK,CAAC;AACxB;AACA,IAAIA,OAAO;AACX,CAAC,UAAUA,OAAO,EAAE;EAChBA,OAAO,CAAC,OAAO,CAAC,GAAG,KAAK;EACxBA,OAAO,CAAC,SAAS,CAAC,GAAG,MAAM;EAC3BA,OAAO,CAAC,OAAO,CAAC,GAAG,GAAG;EACtBA,OAAO,CAAC,KAAK,CAAC,GAAG,IAAI;AACzB,CAAC,EAAEA,OAAO,KAAKF,OAAO,CAACE,OAAO,GAAGA,OAAO,GAAG,CAAC,CAAC,CAAC,CAAC","ignoreList":[]}
|
||||
+54
@@ -0,0 +1,54 @@
|
||||
/** @deprecated */
export declare enum Markers {
    start = "/**",
    nostart = "/***",
    delim = "*",
    end = "*/"
}
/** Configurable comment markers (same shape as the deprecated Markers enum). */
export interface BlockMarkers {
    start: string;
    nostart: string;
    delim: string;
    end: string;
}
/** A parsed comment block: description, tags, backing lines, and problems. */
export interface Block {
    description: string;
    tags: Spec[];
    source: Line[];
    problems: Problem[];
}
/** A parsed tag (`@tag {type} name description`) with its backing lines. */
export interface Spec {
    tag: string;
    name: string;
    default?: string;
    type: string;
    optional: boolean;
    description: string;
    problems: Problem[];
    source: Line[];
}
/** One physical comment line: its number, raw text, and token split. */
export interface Line {
    number: number;
    source: string;
    tokens: Tokens;
}
/**
 * Exhaustive split of one line's text; concatenating all fields in order
 * restores the original line (see the stringifier).
 */
export interface Tokens {
    start: string;
    delimiter: string;
    postDelimiter: string;
    tag: string;
    postTag: string;
    name: string;
    postName: string;
    type: string;
    postType: string;
    description: string;
    end: string;
    lineEnd: string;
}
/** A parsing issue attached to a Block or Spec. */
export interface Problem {
    code: 'unhandled' | 'custom' | 'source:startline' | 'spec:tag:prefix' | 'spec:type:unpaired-curlies' | 'spec:name:unpaired-brackets' | 'spec:name:empty-name' | 'spec:name:invalid-default' | 'spec:name:empty-default';
    message: string;
    line: number;
    critical: boolean;
}
|
||||
+15
@@ -0,0 +1,15 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = getStringifier;
|
||||
/**
 * Renders one line's token record back into its source text by
 * concatenating every field in canonical emission order.
 */
function join(tokens) {
    const ordered = [
        tokens.start,
        tokens.delimiter,
        tokens.postDelimiter,
        tokens.tag,
        tokens.postTag,
        tokens.type,
        tokens.postType,
        tokens.name,
        tokens.postName,
        tokens.description,
        tokens.end,
        tokens.lineEnd
    ];
    return ordered.join('');
}

/**
 * Builds a Stringifier that turns a parsed Block back into comment text,
 * one rendered source line per entry, joined with "\n".
 */
function getStringifier() {
    return block => {
        const rendered = block.source.map(line => join(line.tokens));
        return rendered.join('\n');
    };
}
|
||||
//# sourceMappingURL=index.cjs.map
|
||||
+1
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"index.cjs","names":["Object","defineProperty","exports","value","default","getStringifier","join","tokens","start","delimiter","postDelimiter","tag","postTag","type","postType","name","postName","description","end","lineEnd","block","source","map"],"sources":["index.js"],"sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = getStringifier;\nfunction join(tokens) {\n return (tokens.start +\n tokens.delimiter +\n tokens.postDelimiter +\n tokens.tag +\n tokens.postTag +\n tokens.type +\n tokens.postType +\n tokens.name +\n tokens.postName +\n tokens.description +\n tokens.end +\n tokens.lineEnd);\n}\nfunction getStringifier() {\n return (block) => block.source.map(({ tokens }) => join(tokens)).join('\\n');\n}\n"],"mappings":"AAAA,YAAY;;AACZA,MAAM,CAACC,cAAc,CAACC,OAAO,EAAE,YAAY,EAAE;EAAEC,KAAK,EAAE;AAAK,CAAC,CAAC;AAC7DD,OAAO,CAACE,OAAO,GAAGC,cAAc;AAChC,SAASC,IAAIA,CAACC,MAAM,EAAE;EAClB,OAAQA,MAAM,CAACC,KAAK,GAChBD,MAAM,CAACE,SAAS,GAChBF,MAAM,CAACG,aAAa,GACpBH,MAAM,CAACI,GAAG,GACVJ,MAAM,CAACK,OAAO,GACdL,MAAM,CAACM,IAAI,GACXN,MAAM,CAACO,QAAQ,GACfP,MAAM,CAACQ,IAAI,GACXR,MAAM,CAACS,QAAQ,GACfT,MAAM,CAACU,WAAW,GAClBV,MAAM,CAACW,GAAG,GACVX,MAAM,CAACY,OAAO;AACtB;AACA,SAASd,cAAcA,CAAA,EAAG;EACtB,OAAQe,KAAK,IAAKA,KAAK,CAACC,MAAM,CAACC,GAAG,CAAC,CAAC;IAAEf;EAAO,CAAC,KAAKD,IAAI,CAACC,MAAM,CAAC,CAAC,CAACD,IAAI,CAAC,IAAI,CAAC;AAC/E","ignoreList":[]}
|
||||
+3
@@ -0,0 +1,3 @@
|
||||
import { Block } from '../primitives.js';
/** A function rendering a parsed Block back into its comment text. */
export type Stringifier = (block: Block) => string;
/** Creates the default Stringifier (token concatenation, lines joined by "\n"). */
export default function getStringifier(): Stringifier;
|
||||
+56
@@ -0,0 +1,56 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = inspect;
|
||||
const util_js_1 = require("../util.cjs");
|
||||
// Baseline width (0) for each printable column; `line` is the line-number
// column, the rest mirror the Tokens fields.
const zeroWidth = {
    line: 0,
    start: 0,
    delimiter: 0,
    postDelimiter: 0,
    tag: 0,
    postTag: 0,
    name: 0,
    postName: 0,
    type: 0,
    postType: 0,
    description: 0,
    end: 0,
    lineEnd: 0
};
// Column header overrides; fields without an entry use their own name.
const headers = {
    lineEnd: 'CR'
};
const fields = Object.keys(zeroWidth);
// Renders a cell: whitespace-only values display as "{<length>}".
// (isSpace is assumed to report an all-whitespace string — confirm in ../util.)
const repr = x => (0, util_js_1.isSpace)(x) ? `{${x.length}}` : x;
// Wraps a row's cells in "|" separators.
const frame = line => '|' + line.join('|') + '|';
// Pads every token value of a row out to its column width.
const align = (width, tokens) => Object.keys(tokens).map(k => repr(tokens[k]).padEnd(width[k]));
/**
 * Debugging aid: renders a Block's source lines as an ASCII table with one
 * column per token field. Returns '' for an empty block.
 */
function inspect({
    source
}) {
    var _a, _b;
    if (source.length === 0) return '';
    // Start each column width from its header label, then widen per cell.
    const width = Object.assign({}, zeroWidth);
    for (const f of fields) width[f] = ((_a = headers[f]) !== null && _a !== void 0 ? _a : f).length;
    for (const {
        number,
        tokens
    } of source) {
        width.line = Math.max(width.line, number.toString().length);
        for (const k in tokens) width[k] = Math.max(width[k], repr(tokens[k]).length);
    }
    // Row 0: header labels; row 1: separator dashes; then one row per line.
    const lines = [[], []];
    for (const f of fields) lines[0].push(((_b = headers[f]) !== null && _b !== void 0 ? _b : f).padEnd(width[f]));
    for (const f of fields) lines[1].push('-'.padEnd(width[f], '-'));
    for (const {
        number,
        tokens
    } of source) {
        const line = number.toString().padStart(width.line);
        lines.push([line, ...align(width, tokens)]);
    }
    return lines.map(frame).join('\n');
}
|
||||
//# sourceMappingURL=inspect.cjs.map
|
||||
+1
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"inspect.cjs","names":["Object","defineProperty","exports","value","default","inspect","util_js_1","require","zeroWidth","line","start","delimiter","postDelimiter","tag","postTag","name","postName","type","postType","description","end","lineEnd","headers","fields","keys","repr","x","isSpace","length","frame","join","align","width","tokens","map","k","padEnd","source","_a","_b","assign","f","number","Math","max","toString","lines","push","padStart"],"sources":["inspect.js"],"sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = inspect;\nconst util_js_1 = require(\"../util.js\");\nconst zeroWidth = {\n line: 0,\n start: 0,\n delimiter: 0,\n postDelimiter: 0,\n tag: 0,\n postTag: 0,\n name: 0,\n postName: 0,\n type: 0,\n postType: 0,\n description: 0,\n end: 0,\n lineEnd: 0,\n};\nconst headers = { lineEnd: 'CR' };\nconst fields = Object.keys(zeroWidth);\nconst repr = (x) => ((0, util_js_1.isSpace)(x) ? `{${x.length}}` : x);\nconst frame = (line) => '|' + line.join('|') + '|';\nconst align = (width, tokens) => Object.keys(tokens).map((k) => repr(tokens[k]).padEnd(width[k]));\nfunction inspect({ source }) {\n var _a, _b;\n if (source.length === 0)\n return '';\n const width = Object.assign({}, zeroWidth);\n for (const f of fields)\n width[f] = ((_a = headers[f]) !== null && _a !== void 0 ? _a : f).length;\n for (const { number, tokens } of source) {\n width.line = Math.max(width.line, number.toString().length);\n for (const k in tokens)\n width[k] = Math.max(width[k], repr(tokens[k]).length);\n }\n const lines = [[], []];\n for (const f of fields)\n lines[0].push(((_b = headers[f]) !== null && _b !== void 0 ? 
_b : f).padEnd(width[f]));\n for (const f of fields)\n lines[1].push('-'.padEnd(width[f], '-'));\n for (const { number, tokens } of source) {\n const line = number.toString().padStart(width.line);\n lines.push([line, ...align(width, tokens)]);\n }\n return lines.map(frame).join('\\n');\n}\n"],"mappings":"AAAA,YAAY;;AACZA,MAAM,CAACC,cAAc,CAACC,OAAO,EAAE,YAAY,EAAE;EAAEC,KAAK,EAAE;AAAK,CAAC,CAAC;AAC7DD,OAAO,CAACE,OAAO,GAAGC,OAAO;AACzB,MAAMC,SAAS,GAAGC,OAAO,eAAc;AACvC,MAAMC,SAAS,GAAG;EACdC,IAAI,EAAE,CAAC;EACPC,KAAK,EAAE,CAAC;EACRC,SAAS,EAAE,CAAC;EACZC,aAAa,EAAE,CAAC;EAChBC,GAAG,EAAE,CAAC;EACNC,OAAO,EAAE,CAAC;EACVC,IAAI,EAAE,CAAC;EACPC,QAAQ,EAAE,CAAC;EACXC,IAAI,EAAE,CAAC;EACPC,QAAQ,EAAE,CAAC;EACXC,WAAW,EAAE,CAAC;EACdC,GAAG,EAAE,CAAC;EACNC,OAAO,EAAE;AACb,CAAC;AACD,MAAMC,OAAO,GAAG;EAAED,OAAO,EAAE;AAAK,CAAC;AACjC,MAAME,MAAM,GAAGvB,MAAM,CAACwB,IAAI,CAAChB,SAAS,CAAC;AACrC,MAAMiB,IAAI,GAAIC,CAAC,IAAM,CAAC,CAAC,EAAEpB,SAAS,CAACqB,OAAO,EAAED,CAAC,CAAC,GAAG,IAAIA,CAAC,CAACE,MAAM,GAAG,GAAGF,CAAE;AACrE,MAAMG,KAAK,GAAIpB,IAAI,IAAK,GAAG,GAAGA,IAAI,CAACqB,IAAI,CAAC,GAAG,CAAC,GAAG,GAAG;AAClD,MAAMC,KAAK,GAAGA,CAACC,KAAK,EAAEC,MAAM,KAAKjC,MAAM,CAACwB,IAAI,CAACS,MAAM,CAAC,CAACC,GAAG,CAAEC,CAAC,IAAKV,IAAI,CAACQ,MAAM,CAACE,CAAC,CAAC,CAAC,CAACC,MAAM,CAACJ,KAAK,CAACG,CAAC,CAAC,CAAC,CAAC;AACjG,SAAS9B,OAAOA,CAAC;EAAEgC;AAAO,CAAC,EAAE;EACzB,IAAIC,EAAE,EAAEC,EAAE;EACV,IAAIF,MAAM,CAACT,MAAM,KAAK,CAAC,EACnB,OAAO,EAAE;EACb,MAAMI,KAAK,GAAGhC,MAAM,CAACwC,MAAM,CAAC,CAAC,CAAC,EAAEhC,SAAS,CAAC;EAC1C,KAAK,MAAMiC,CAAC,IAAIlB,MAAM,EAClBS,KAAK,CAACS,CAAC,CAAC,GAAG,CAAC,CAACH,EAAE,GAAGhB,OAAO,CAACmB,CAAC,CAAC,MAAM,IAAI,IAAIH,EAAE,KAAK,KAAK,CAAC,GAAGA,EAAE,GAAGG,CAAC,EAAEb,MAAM;EAC5E,KAAK,MAAM;IAAEc,MAAM;IAAET;EAAO,CAAC,IAAII,MAAM,EAAE;IACrCL,KAAK,CAACvB,IAAI,GAAGkC,IAAI,CAACC,GAAG,CAACZ,KAAK,CAACvB,IAAI,EAAEiC,MAAM,CAACG,QAAQ,CAAC,CAAC,CAACjB,MAAM,CAAC;IAC3D,KAAK,MAAMO,CAAC,IAAIF,MAAM,EAClBD,KAAK,CAACG,CAAC,CAAC,GAAGQ,IAAI,CAACC,GAAG,CAACZ,KAAK,CAACG,CAAC,CAAC,EAAEV,IAAI,CAACQ,MAAM,CAACE,CAAC,CAAC,CAAC,CAACP,
MAAM,CAAC;EAC7D;EACA,MAAMkB,KAAK,GAAG,CAAC,EAAE,EAAE,EAAE,CAAC;EACtB,KAAK,MAAML,CAAC,IAAIlB,MAAM,EAClBuB,KAAK,CAAC,CAAC,CAAC,CAACC,IAAI,CAAC,CAAC,CAACR,EAAE,GAAGjB,OAAO,CAACmB,CAAC,CAAC,MAAM,IAAI,IAAIF,EAAE,KAAK,KAAK,CAAC,GAAGA,EAAE,GAAGE,CAAC,EAAEL,MAAM,CAACJ,KAAK,CAACS,CAAC,CAAC,CAAC,CAAC;EAC1F,KAAK,MAAMA,CAAC,IAAIlB,MAAM,EAClBuB,KAAK,CAAC,CAAC,CAAC,CAACC,IAAI,CAAC,GAAG,CAACX,MAAM,CAACJ,KAAK,CAACS,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC;EAC5C,KAAK,MAAM;IAAEC,MAAM;IAAET;EAAO,CAAC,IAAII,MAAM,EAAE;IACrC,MAAM5B,IAAI,GAAGiC,MAAM,CAACG,QAAQ,CAAC,CAAC,CAACG,QAAQ,CAAChB,KAAK,CAACvB,IAAI,CAAC;IACnDqC,KAAK,CAACC,IAAI,CAAC,CAACtC,IAAI,EAAE,GAAGsB,KAAK,CAACC,KAAK,EAAEC,MAAM,CAAC,CAAC,CAAC;EAC/C;EACA,OAAOa,KAAK,CAACZ,GAAG,CAACL,KAAK,CAAC,CAACC,IAAI,CAAC,IAAI,CAAC;AACtC","ignoreList":[]}
|
||||
+2
@@ -0,0 +1,2 @@
|
||||
import { Block } from '../primitives.js';
/**
 * Renders a Block's source lines as an ASCII table of token values,
 * one column per token field — a debugging aid.
 */
export default function inspect({ source }: Block): string;
|
||||
+104
@@ -0,0 +1,104 @@
|
||||
"use strict";
|
||||
|
||||
// tsc-emitted __rest helper: returns a shallow copy of `s` without the
// own enumerable properties (string keys and symbols) listed in `e`.
var __rest = this && this.__rest || function (s, e) {
    var t = {};
    for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) t[p] = s[p];
    if (s != null && typeof Object.getOwnPropertySymbols === "function") for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
        if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) t[p[i]] = s[p[i]];
    }
    return t;
};
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = align;
|
||||
const primitives_js_1 = require("../primitives.cjs");
|
||||
const util_js_1 = require("../util.cjs");
|
||||
// Baseline (0) widths for the columns being aligned.
const zeroWidth = {
    start: 0,
    tag: 0,
    type: 0,
    name: 0
};
// Reducer: folds one source line into the running width record `w`.
// The start indent is taken from the block's opening line only
// (delimiter === markers.start); tag/type/name track the max width seen.
const getWidth = (markers = primitives_js_1.Markers) => (w, {
    tokens: t
}) => ({
    start: t.delimiter === markers.start ? t.start.length : w.start,
    tag: Math.max(w.tag, t.tag.length),
    type: Math.max(w.type, t.type.length),
    name: Math.max(w.name, t.name.length)
});
// Builds a string of `len` spaces.
const space = len => ''.padStart(len, ' ');
/**
 * Creates a Transform that re-pads each line's tokens so the tag, type
 * and name columns line up across the whole block.
 */
function align(markers = primitives_js_1.Markers) {
    let intoTags = false; // flips true once the first tagged line is seen
    let w; // column widths, computed per block in the returned transform
    // Returns a copy of `line` with padding tokens rewritten; input not mutated.
    function update(line) {
        const tokens = Object.assign({}, line.tokens);
        if (tokens.tag !== '') intoTags = true;
        const isEmpty = tokens.tag === '' && tokens.name === '' && tokens.type === '' && tokens.description === '';
        // dangling '*/' — a closing line with no content only gets indent + 1
        if (tokens.end === markers.end && isEmpty) {
            tokens.start = space(w.start + 1);
            return Object.assign(Object.assign({}, line), {
                tokens
            });
        }
        switch (tokens.delimiter) {
            case markers.start:
                tokens.start = space(w.start);
                break;
            case markers.delim:
                tokens.start = space(w.start + 1);
                break;
            default:
                // unrecognized delimiter: drop it and widen the indent to
                // compensate delimiter
                tokens.delimiter = '';
                tokens.start = space(w.start + 2);
        }
        // Description-only lines before the first tag keep natural spacing.
        if (!intoTags) {
            tokens.postDelimiter = tokens.description === '' ? '' : ' ';
            return Object.assign(Object.assign({}, line), {
                tokens
            });
        }
        // Track which trailing tokens are empty so the padding after the last
        // populated one is dropped instead of leaving trailing spaces.
        const nothingAfter = {
            delim: false,
            tag: false,
            type: false,
            name: false
        };
        if (tokens.description === '') {
            nothingAfter.name = true;
            tokens.postName = '';
            if (tokens.name === '') {
                nothingAfter.type = true;
                tokens.postType = '';
                if (tokens.type === '') {
                    nothingAfter.tag = true;
                    tokens.postTag = '';
                    if (tokens.tag === '') {
                        nothingAfter.delim = true;
                    }
                }
            }
        }
        tokens.postDelimiter = nothingAfter.delim ? '' : ' ';
        // Pad each populated column to its block-wide width plus one space.
        if (!nothingAfter.tag) tokens.postTag = space(w.tag - tokens.tag.length + 1);
        if (!nothingAfter.type) tokens.postType = space(w.type - tokens.type.length + 1);
        if (!nothingAfter.name) tokens.postName = space(w.name - tokens.name.length + 1);
        return Object.assign(Object.assign({}, line), {
            tokens
        });
    }
    return _a => {
        var {
                source
            } = _a,
            fields = __rest(_a, ["source"]);
        // Measure widths first, then rewrite every line.
        w = source.reduce(getWidth(markers), Object.assign({}, zeroWidth));
        // NOTE(review): rewireSource presumably re-links the block's source
        // references after the map — confirm in ../util.
        return (0, util_js_1.rewireSource)(Object.assign(Object.assign({}, fields), {
            source: source.map(update)
        }));
    };
}
|
||||
//# sourceMappingURL=align.cjs.map
|
||||
+1
File diff suppressed because one or more lines are too long
+3
@@ -0,0 +1,3 @@
|
||||
import { Transform } from './index.js';
import { Markers } from '../primitives.js';
/** Creates a Transform that aligns token columns across a block's lines. */
export default function align(markers?: typeof Markers): Transform;
|
||||
+35
@@ -0,0 +1,35 @@
|
||||
"use strict";
|
||||
|
||||
// tsc-emitted __rest helper: returns a shallow copy of `s` without the
// own enumerable properties (string keys and symbols) listed in `e`.
var __rest = this && this.__rest || function (s, e) {
    var t = {};
    for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) t[p] = s[p];
    if (s != null && typeof Object.getOwnPropertySymbols === "function") for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
        if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) t[p[i]] = s[p[i]];
    }
    return t;
};
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = crlf;
|
||||
const util_js_1 = require("../util.cjs");
|
||||
// NOTE(review): `order` is not referenced anywhere in this module's visible
// code — possibly consumed elsewhere or left over from a refactor; confirm.
const order = ['end', 'description', 'postType', 'type', 'postName', 'name', 'postTag', 'tag', 'postDelimiter', 'delimiter', 'start'];
/**
 * Creates a Transform normalizing line endings on every source line:
 * 'LF' sets the lineEnd token to '', anything else (i.e. 'CRLF') to '\r'.
 */
function crlf(ending) {
    // Copies the line with only tokens.lineEnd replaced.
    function update(line) {
        return Object.assign(Object.assign({}, line), {
            tokens: Object.assign(Object.assign({}, line.tokens), {
                lineEnd: ending === 'LF' ? '' : '\r'
            })
        });
    }
    return _a => {
        var {
                source
            } = _a,
            fields = __rest(_a, ["source"]);
        // NOTE(review): rewireSource presumably re-links the block's source
        // references after the map — confirm in ../util.
        return (0, util_js_1.rewireSource)(Object.assign(Object.assign({}, fields), {
            source: source.map(update)
        }));
    };
}
|
||||
//# sourceMappingURL=crlf.cjs.map
|
||||
+1
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"crlf.cjs","names":["__rest","s","e","t","p","Object","prototype","hasOwnProperty","call","indexOf","getOwnPropertySymbols","i","length","propertyIsEnumerable","defineProperty","exports","value","default","crlf","util_js_1","require","order","ending","update","line","assign","tokens","lineEnd","_a","source","fields","rewireSource","map"],"sources":["crlf.js"],"sourcesContent":["\"use strict\";\nvar __rest = (this && this.__rest) || function (s, e) {\n var t = {};\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\n t[p] = s[p];\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\n t[p[i]] = s[p[i]];\n }\n return t;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = crlf;\nconst util_js_1 = require(\"../util.js\");\nconst order = [\n 'end',\n 'description',\n 'postType',\n 'type',\n 'postName',\n 'name',\n 'postTag',\n 'tag',\n 'postDelimiter',\n 'delimiter',\n 'start',\n];\nfunction crlf(ending) {\n function update(line) {\n return Object.assign(Object.assign({}, line), { tokens: Object.assign(Object.assign({}, line.tokens), { lineEnd: ending === 'LF' ? 
'' : '\\r' }) });\n }\n return (_a) => {\n var { source } = _a, fields = __rest(_a, [\"source\"]);\n return (0, util_js_1.rewireSource)(Object.assign(Object.assign({}, fields), { source: source.map(update) }));\n };\n}\n"],"mappings":"AAAA,YAAY;;AACZ,IAAIA,MAAM,GAAI,IAAI,IAAI,IAAI,CAACA,MAAM,IAAK,UAAUC,CAAC,EAAEC,CAAC,EAAE;EAClD,IAAIC,CAAC,GAAG,CAAC,CAAC;EACV,KAAK,IAAIC,CAAC,IAAIH,CAAC,EAAE,IAAII,MAAM,CAACC,SAAS,CAACC,cAAc,CAACC,IAAI,CAACP,CAAC,EAAEG,CAAC,CAAC,IAAIF,CAAC,CAACO,OAAO,CAACL,CAAC,CAAC,GAAG,CAAC,EAC/ED,CAAC,CAACC,CAAC,CAAC,GAAGH,CAAC,CAACG,CAAC,CAAC;EACf,IAAIH,CAAC,IAAI,IAAI,IAAI,OAAOI,MAAM,CAACK,qBAAqB,KAAK,UAAU,EAC/D,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEP,CAAC,GAAGC,MAAM,CAACK,qBAAqB,CAACT,CAAC,CAAC,EAAEU,CAAC,GAAGP,CAAC,CAACQ,MAAM,EAAED,CAAC,EAAE,EAAE;IACpE,IAAIT,CAAC,CAACO,OAAO,CAACL,CAAC,CAACO,CAAC,CAAC,CAAC,GAAG,CAAC,IAAIN,MAAM,CAACC,SAAS,CAACO,oBAAoB,CAACL,IAAI,CAACP,CAAC,EAAEG,CAAC,CAACO,CAAC,CAAC,CAAC,EAC1ER,CAAC,CAACC,CAAC,CAACO,CAAC,CAAC,CAAC,GAAGV,CAAC,CAACG,CAAC,CAACO,CAAC,CAAC,CAAC;EACzB;EACJ,OAAOR,CAAC;AACZ,CAAC;AACDE,MAAM,CAACS,cAAc,CAACC,OAAO,EAAE,YAAY,EAAE;EAAEC,KAAK,EAAE;AAAK,CAAC,CAAC;AAC7DD,OAAO,CAACE,OAAO,GAAGC,IAAI;AACtB,MAAMC,SAAS,GAAGC,OAAO,eAAc;AACvC,MAAMC,KAAK,GAAG,CACV,KAAK,EACL,aAAa,EACb,UAAU,EACV,MAAM,EACN,UAAU,EACV,MAAM,EACN,SAAS,EACT,KAAK,EACL,eAAe,EACf,WAAW,EACX,OAAO,CACV;AACD,SAASH,IAAIA,CAACI,MAAM,EAAE;EAClB,SAASC,MAAMA,CAACC,IAAI,EAAE;IAClB,OAAOnB,MAAM,CAACoB,MAAM,CAACpB,MAAM,CAACoB,MAAM,CAAC,CAAC,CAAC,EAAED,IAAI,CAAC,EAAE;MAAEE,MAAM,EAAErB,MAAM,CAACoB,MAAM,CAACpB,MAAM,CAACoB,MAAM,CAAC,CAAC,CAAC,EAAED,IAAI,CAACE,MAAM,CAAC,EAAE;QAAEC,OAAO,EAAEL,MAAM,KAAK,IAAI,GAAG,EAAE,GAAG;MAAK,CAAC;IAAE,CAAC,CAAC;EACtJ;EACA,OAAQM,EAAE,IAAK;IACX,IAAI;QAAEC;MAAO,CAAC,GAAGD,EAAE;MAAEE,MAAM,GAAG9B,MAAM,CAAC4B,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC;IACpD,OAAO,CAAC,CAAC,EAAET,SAAS,CAACY,YAAY,EAAE1B,MAAM,CAACoB,MAAM,CAACpB,MAAM,CAACoB,MAAM,CAAC,CAAC,CAAC,EAAEK,MAAM,CAAC,EAAE;MAAED,MAAM,EAAEA,MAAM,CAACG,GAAG,CAACT,MAAM;IAAE,CAAC,CAAC,CAAC;EAChH,CAAC;AACL","i
gnoreList":[]}
|
||||
+3
@@ -0,0 +1,3 @@
|
||||
import { Transform } from './index.js';
/** Target line ending: 'LF' clears each line's CR, 'CRLF' sets it to '\r'. */
export type Ending = 'LF' | 'CRLF';
/** Creates a Transform normalizing every line's `lineEnd` token to `ending`. */
export default function crlf(ending: Ending): Transform;
|
||||
+45
@@ -0,0 +1,45 @@
|
||||
"use strict";
|
||||
|
||||
var __rest = this && this.__rest || function (s, e) {
|
||||
var t = {};
|
||||
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) t[p] = s[p];
|
||||
if (s != null && typeof Object.getOwnPropertySymbols === "function") for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
|
||||
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) t[p[i]] = s[p[i]];
|
||||
}
|
||||
return t;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = indent;
|
||||
const util_js_1 = require("../util.cjs");
|
||||
/** Builds a trimmer that removes the first `offset` characters of a string. */
function pull(offset) {
    return function (str) {
        return str.slice(offset);
    };
}

/** Builds a padder that appends `offset` spaces to a string. */
function push(offset) {
    const padding = ''.padStart(offset, ' ');
    return function (str) {
        return str + padding;
    };
}
|
||||
/**
 * Creates a Transform that re-indents every line so the comment's first
 * line starts at column `pos`; all lines get the same shift applied.
 */
function indent(pos) {
    let shift; // padder/trimmer chosen from the first line, reused afterwards
    const pad = start => {
        if (shift === undefined) {
            // Decide once, from the first line seen: pad right if the current
            // indent is shorter than `pos`, otherwise trim the excess.
            const offset = pos - start.length;
            shift = offset > 0 ? push(offset) : pull(-offset);
        }
        return shift(start);
    };
    // Copies the line with only tokens.start re-padded.
    const update = line => Object.assign(Object.assign({}, line), {
        tokens: Object.assign(Object.assign({}, line.tokens), {
            start: pad(line.tokens.start)
        })
    });
    return _a => {
        var {
                source
            } = _a,
            fields = __rest(_a, ["source"]);
        // NOTE(review): `shift` is cached on the indent() closure and never
        // reset, so one transform instance reuses the first block's offset
        // for every later block it processes — confirm this is intended.
        return (0, util_js_1.rewireSource)(Object.assign(Object.assign({}, fields), {
            source: source.map(update)
        }));
    };
}
|
||||
//# sourceMappingURL=indent.cjs.map
|
||||
+1
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"indent.cjs","names":["__rest","s","e","t","p","Object","prototype","hasOwnProperty","call","indexOf","getOwnPropertySymbols","i","length","propertyIsEnumerable","defineProperty","exports","value","default","indent","util_js_1","require","pull","offset","str","slice","push","space","padStart","pos","shift","pad","start","undefined","update","line","assign","tokens","_a","source","fields","rewireSource","map"],"sources":["indent.js"],"sourcesContent":["\"use strict\";\nvar __rest = (this && this.__rest) || function (s, e) {\n var t = {};\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\n t[p] = s[p];\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\n t[p[i]] = s[p[i]];\n }\n return t;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = indent;\nconst util_js_1 = require(\"../util.js\");\nconst pull = (offset) => (str) => str.slice(offset);\nconst push = (offset) => {\n const space = ''.padStart(offset, ' ');\n return (str) => str + space;\n};\nfunction indent(pos) {\n let shift;\n const pad = (start) => {\n if (shift === undefined) {\n const offset = pos - start.length;\n shift = offset > 0 ? 
push(offset) : pull(-offset);\n }\n return shift(start);\n };\n const update = (line) => (Object.assign(Object.assign({}, line), { tokens: Object.assign(Object.assign({}, line.tokens), { start: pad(line.tokens.start) }) }));\n return (_a) => {\n var { source } = _a, fields = __rest(_a, [\"source\"]);\n return (0, util_js_1.rewireSource)(Object.assign(Object.assign({}, fields), { source: source.map(update) }));\n };\n}\n"],"mappings":"AAAA,YAAY;;AACZ,IAAIA,MAAM,GAAI,IAAI,IAAI,IAAI,CAACA,MAAM,IAAK,UAAUC,CAAC,EAAEC,CAAC,EAAE;EAClD,IAAIC,CAAC,GAAG,CAAC,CAAC;EACV,KAAK,IAAIC,CAAC,IAAIH,CAAC,EAAE,IAAII,MAAM,CAACC,SAAS,CAACC,cAAc,CAACC,IAAI,CAACP,CAAC,EAAEG,CAAC,CAAC,IAAIF,CAAC,CAACO,OAAO,CAACL,CAAC,CAAC,GAAG,CAAC,EAC/ED,CAAC,CAACC,CAAC,CAAC,GAAGH,CAAC,CAACG,CAAC,CAAC;EACf,IAAIH,CAAC,IAAI,IAAI,IAAI,OAAOI,MAAM,CAACK,qBAAqB,KAAK,UAAU,EAC/D,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEP,CAAC,GAAGC,MAAM,CAACK,qBAAqB,CAACT,CAAC,CAAC,EAAEU,CAAC,GAAGP,CAAC,CAACQ,MAAM,EAAED,CAAC,EAAE,EAAE;IACpE,IAAIT,CAAC,CAACO,OAAO,CAACL,CAAC,CAACO,CAAC,CAAC,CAAC,GAAG,CAAC,IAAIN,MAAM,CAACC,SAAS,CAACO,oBAAoB,CAACL,IAAI,CAACP,CAAC,EAAEG,CAAC,CAACO,CAAC,CAAC,CAAC,EAC1ER,CAAC,CAACC,CAAC,CAACO,CAAC,CAAC,CAAC,GAAGV,CAAC,CAACG,CAAC,CAACO,CAAC,CAAC,CAAC;EACzB;EACJ,OAAOR,CAAC;AACZ,CAAC;AACDE,MAAM,CAACS,cAAc,CAACC,OAAO,EAAE,YAAY,EAAE;EAAEC,KAAK,EAAE;AAAK,CAAC,CAAC;AAC7DD,OAAO,CAACE,OAAO,GAAGC,MAAM;AACxB,MAAMC,SAAS,GAAGC,OAAO,eAAc;AACvC,MAAMC,IAAI,GAAIC,MAAM,IAAMC,GAAG,IAAKA,GAAG,CAACC,KAAK,CAACF,MAAM,CAAC;AACnD,MAAMG,IAAI,GAAIH,MAAM,IAAK;EACrB,MAAMI,KAAK,GAAG,EAAE,CAACC,QAAQ,CAACL,MAAM,EAAE,GAAG,CAAC;EACtC,OAAQC,GAAG,IAAKA,GAAG,GAAGG,KAAK;AAC/B,CAAC;AACD,SAASR,MAAMA,CAACU,GAAG,EAAE;EACjB,IAAIC,KAAK;EACT,MAAMC,GAAG,GAAIC,KAAK,IAAK;IACnB,IAAIF,KAAK,KAAKG,SAAS,EAAE;MACrB,MAAMV,MAAM,GAAGM,GAAG,GAAGG,KAAK,CAACnB,MAAM;MACjCiB,KAAK,GAAGP,MAAM,GAAG,CAAC,GAAGG,IAAI,CAACH,MAAM,CAAC,GAAGD,IAAI,CAAC,CAACC,MAAM,CAAC;IACrD;IACA,OAAOO,KAAK,CAACE,KAAK,CAAC;EACvB,CAAC;EACD,MAAME,MAAM,GAAIC,IAAI,IAAM7B,MAAM,CAAC8B,MAAM,CAAC9B,MAAM,CAAC8B,M
AAM,CAAC,CAAC,CAAC,EAAED,IAAI,CAAC,EAAE;IAAEE,MAAM,EAAE/B,MAAM,CAAC8B,MAAM,CAAC9B,MAAM,CAAC8B,MAAM,CAAC,CAAC,CAAC,EAAED,IAAI,CAACE,MAAM,CAAC,EAAE;MAAEL,KAAK,EAAED,GAAG,CAACI,IAAI,CAACE,MAAM,CAACL,KAAK;IAAE,CAAC;EAAE,CAAC,CAAE;EAC/J,OAAQM,EAAE,IAAK;IACX,IAAI;QAAEC;MAAO,CAAC,GAAGD,EAAE;MAAEE,MAAM,GAAGvC,MAAM,CAACqC,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC;IACpD,OAAO,CAAC,CAAC,EAAElB,SAAS,CAACqB,YAAY,EAAEnC,MAAM,CAAC8B,MAAM,CAAC9B,MAAM,CAAC8B,MAAM,CAAC,CAAC,CAAC,EAAEI,MAAM,CAAC,EAAE;MAAED,MAAM,EAAEA,MAAM,CAACG,GAAG,CAACR,MAAM;IAAE,CAAC,CAAC,CAAC;EAChH,CAAC;AACL","ignoreList":[]}
|
||||
+2
@@ -0,0 +1,2 @@
|
||||
import { Transform } from './index.js';
/**
 * Creates a transform that shifts every block line's `start` (indentation)
 * token so the block is aligned at column `pos`.
 * @param pos target indentation width
 */
export default function indent(pos: number): Transform;
|
||||
+10
@@ -0,0 +1,10 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.flow = flow;
|
||||
/**
 * Composes transforms left-to-right into a single transform:
 * `flow(a, b)(block)` is equivalent to `b(a(block))`.
 * With no arguments the returned transform is the identity.
 * @param {...Function} transforms block transforms applied in order
 */
function flow(...transforms) {
  return function (block) {
    let result = block;
    for (const transform of transforms) {
      result = transform(result);
    }
    return result;
  };
}
|
||||
//# sourceMappingURL=index.cjs.map
|
||||
+1
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"index.cjs","names":["Object","defineProperty","exports","value","flow","transforms","block","reduce","t"],"sources":["index.js"],"sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.flow = flow;\nfunction flow(...transforms) {\n return (block) => transforms.reduce((block, t) => t(block), block);\n}\n"],"mappings":"AAAA,YAAY;;AACZA,MAAM,CAACC,cAAc,CAACC,OAAO,EAAE,YAAY,EAAE;EAAEC,KAAK,EAAE;AAAK,CAAC,CAAC;AAC7DD,OAAO,CAACE,IAAI,GAAGA,IAAI;AACnB,SAASA,IAAIA,CAAC,GAAGC,UAAU,EAAE;EACzB,OAAQC,KAAK,IAAKD,UAAU,CAACE,MAAM,CAAC,CAACD,KAAK,EAAEE,CAAC,KAAKA,CAAC,CAACF,KAAK,CAAC,EAAEA,KAAK,CAAC;AACtE","ignoreList":[]}
|
||||
+3
@@ -0,0 +1,3 @@
|
||||
import { Block } from '../primitives.js';
|
||||
export type Transform = (Block: Block) => Block;
|
||||
export declare function flow(...transforms: Transform[]): Transform;
|
||||
+90
@@ -0,0 +1,90 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.isSpace = isSpace;
|
||||
exports.hasCR = hasCR;
|
||||
exports.splitCR = splitCR;
|
||||
exports.splitSpace = splitSpace;
|
||||
exports.splitLines = splitLines;
|
||||
exports.seedBlock = seedBlock;
|
||||
exports.seedSpec = seedSpec;
|
||||
exports.seedTokens = seedTokens;
|
||||
exports.rewireSource = rewireSource;
|
||||
exports.rewireSpecs = rewireSpecs;
|
||||
/**
 * Tells whether the string is non-empty and consists only of whitespace.
 * @param {string} source text to inspect
 * @returns {boolean}
 */
function isSpace(source) {
  const onlyWhitespace = /^\s+$/;
  return onlyWhitespace.test(source);
}
|
||||
/**
 * Tells whether the string ends with a carriage-return character.
 * @param {string} source text to inspect
 * @returns {boolean}
 */
function hasCR(source) {
  return source.endsWith('\r');
}
|
||||
/**
 * Splits trailing carriage returns off a line.
 * @param {string} source raw line
 * @returns {[string, string]} [trailing CR run, remainder] — ['', source]
 * when the line has no trailing CRs
 */
function splitCR(source) {
  const match = /\r+$/.exec(source);
  if (match === null) return ['', source];
  const cut = source.length - match[0].length;
  return [source.slice(cut), source.slice(0, cut)];
}
|
||||
/**
 * Splits leading whitespace off a string.
 * @param {string} source raw text
 * @returns {[string, string]} [leading whitespace, remainder] — ['', source]
 * when there is no leading whitespace
 */
function splitSpace(source) {
  const match = /^\s+/.exec(source);
  if (match === null) return ['', source];
  const width = match[0].length;
  return [source.slice(0, width), source.slice(width)];
}
|
||||
/**
 * Splits text into lines on LF. CR characters, if any, stay attached to
 * the preceding line.
 * @param {string} source multi-line text
 * @returns {string[]}
 */
function splitLines(source) {
  return source.split('\n');
}
|
||||
/**
 * Builds a Block, filling any omitted field with its empty default.
 * @param {object} [block] partial block to seed from
 * @returns {object} complete block
 */
function seedBlock(block = {}) {
  const defaults = {
    description: '',
    tags: [],
    source: [],
    problems: []
  };
  return Object.assign(defaults, block);
}
|
||||
/**
 * Builds a Spec, filling any omitted field with its empty default.
 * @param {object} [spec] partial spec to seed from
 * @returns {object} complete spec
 */
function seedSpec(spec = {}) {
  const defaults = {
    tag: '',
    name: '',
    type: '',
    optional: false,
    description: '',
    problems: [],
    source: []
  };
  return Object.assign(defaults, spec);
}
|
||||
/**
 * Builds a Tokens record with every token defaulting to the empty string.
 * @param {object} [tokens] partial tokens to seed from
 * @returns {object} complete tokens record
 */
function seedTokens(tokens = {}) {
  const defaults = {
    start: '',
    delimiter: '',
    postDelimiter: '',
    tag: '',
    postTag: '',
    name: '',
    postName: '',
    type: '',
    postType: '',
    description: '',
    end: '',
    lineEnd: ''
  };
  return Object.assign(defaults, tokens);
}
|
||||
/**
 * Assures Block.tags[].source contains references to the Block.source items,
 * using Block.source as the source of truth. This is a counterpart of
 * rewireSpecs. Mutates the block in place.
 * @param block parsed comments block
 */
function rewireSource(block) {
  const byNumber = new Map();
  for (const line of block.source) {
    byNumber.set(line.number, line);
  }
  for (const spec of block.tags) {
    spec.source = spec.source.map(line => byNumber.get(line.number));
  }
  return block;
}
|
||||
/**
 * Assures Block.source contains references to the Block.tags[].source items,
 * using Block.tags[].source as the source of truth. This is a counterpart of
 * rewireSource. Mutates the block in place; lines not claimed by any tag
 * keep their original objects.
 * @param block parsed comments block
 */
function rewireSpecs(block) {
  const byNumber = new Map();
  for (const spec of block.tags) {
    for (const line of spec.source) {
      byNumber.set(line.number, line);
    }
  }
  block.source = block.source.map(line => byNumber.get(line.number) || line);
  return block;
}
|
||||
//# sourceMappingURL=util.cjs.map
|
||||
+1
File diff suppressed because one or more lines are too long
+21
@@ -0,0 +1,21 @@
|
||||
import { Block, Tokens, Spec } from './primitives.js';
/** Tells whether the string is non-empty and consists only of whitespace. */
export declare function isSpace(source: string): boolean;
/** Tells whether the string ends with a carriage-return character. */
export declare function hasCR(source: string): boolean;
/** Splits trailing CRs off a line: returns [trailing CR run, remainder]. */
export declare function splitCR(source: string): [string, string];
/** Splits leading whitespace off: returns [leading whitespace, remainder]. */
export declare function splitSpace(source: string): [string, string];
/** Splits text into lines on LF. */
export declare function splitLines(source: string): string[];
/** Builds a Block, filling any omitted field with its empty default. */
export declare function seedBlock(block?: Partial<Block>): Block;
/** Builds a Spec, filling any omitted field with its empty default. */
export declare function seedSpec(spec?: Partial<Spec>): Spec;
/** Builds a Tokens record with every token defaulting to ''. */
export declare function seedTokens(tokens?: Partial<Tokens>): Tokens;
/**
 * Assures Block.tags[].source contains references to the Block.source items,
 * using Block.source as a source of truth. This is a counterpart of rewireSpecs
 * @param block parsed comments block
 */
export declare function rewireSource(block: Block): Block;
/**
 * Assures Block.source contains references to the Block.tags[].source items,
 * using Block.tags[].source as a source of truth. This is a counterpart of rewireSource
 * @param block parsed comments block
 */
export declare function rewireSpecs(block: Block): Block;
|
||||
+105
@@ -0,0 +1,105 @@
|
||||
# Migrating 0.x to 1.x
|
||||
|
||||
## Parser
|
||||
|
||||
0.x can be mostly translated into 1.x one way or another. The idea behind the new config structure is to handle only the most common cases, and provide the fallback for alternative implementation.
|
||||
|
||||
### `dotted_names: boolean`
|
||||
|
||||
> By default dotted names like `name.subname.subsubname` will be expanded into nested sections, this can be prevented by passing opts.dotted_names = false.
|
||||
|
||||
**Removed** This feature is removed but still can be done on top of the `parse()` output. Please post a request or contribute a PR if you need it.
|
||||
|
||||
### `trim: boolean`
|
||||
|
||||
> Set this to false to avoid the default of trimming whitespace at the start and end of each line.
|
||||
|
||||
In the new parser all original spacing is kept along with comment lines in `.source`. Description lines are joined together depending on `spacing` option
|
||||
|
||||
**New option:**
|
||||
|
||||
- `spacing: "compact"` lines concatenated with a single space and no line breaks
|
||||
- `spacing: "preserve"` keeps line breaks and space around as is. Indentation space counts from `*` delimiter or from the start of the line if the delimiter is omitted
|
||||
- `spacing: (lines: Line[]) => string` completely freeform joining strategy, since all original spacing can be accessed, there is no limit to how this can be implemented. See [primitives.ts](./src/primitives.ts) and [spacer.ts](./src/parser/spacer.ts)
|
||||
|
||||
### `join: string | number | boolean`
|
||||
|
||||
> If the following lines of a multiline comment do not start with a star, `join` will have the following effect on tag source (and description) when joining the lines together:
|
||||
>
|
||||
> - If a string, use that string in place of the leading whitespace (and avoid newlines).
|
||||
> - If a non-zero number (e.g., 1), do no trimming and avoid newlines.
|
||||
> - If undefined, false, or 0, use the default behavior of not trimming but adding a newline.
|
||||
> - Otherwise (e.g., if join is true), replace any leading whitespace with a single space and avoid newlines.
|
||||
>
|
||||
> Note that if a multi-line comment has lines that start with a star, these will be appended with initial whitespace as is and with newlines regardless of the join setting.
|
||||
|
||||
See the `spacing` option above; all of these variations can be fine-tuned with `spacing: (lines: Line[]) => string`
|
||||
|
||||
### `fence: string | RegExp | ((source: string) => boolean)`
|
||||
|
||||
> Set to a string or regular expression to toggle state upon finding an odd number of matches within a line. Defaults to ```.
|
||||
>
|
||||
> If set to a function, it should return true to toggle fenced state; upon returning true the first time, this will prevent subsequent lines from being interpreted as starting a new jsdoc tag until such time as the function returns true again to indicate that the state has toggled back.
|
||||
|
||||
This is mostly kept the same
|
||||
|
||||
**New options:**
|
||||
|
||||
- ```` fence: '```' ```` same as 0.x
|
||||
- `fencer: (source: string) => boolean` same as 0.x, see [parser/block-parser.ts](./src/parser/block-parser.ts)
|
||||
|
||||
### `parsers: Parser[]`
|
||||
|
||||
> In case you need to parse tags in different way you can pass opts.parsers = [parser1, ..., parserN], where each parser is function name(str:String, data:Object):{source:String, data:Object}.
|
||||
> ...
|
||||
|
||||
**New options:**
|
||||
|
||||
- `tokenizers: Tokenizer[]` is a list of functions extracting the `tag`, `type`, `name` and `description` tokens from this string. See [parser/spec-parser.ts](./src/parser/spec-parser.ts) and [primitives.ts](./src/primitives.ts)
|
||||
|
||||
Default tokenizers chain is
|
||||
|
||||
```js
|
||||
[
|
||||
tagTokenizer(),
|
||||
typeTokenizer(),
|
||||
nameTokenizer(),
|
||||
descriptionTokenizer(getSpacer(spacing)),
|
||||
]
|
||||
```
|
||||
|
||||
where
|
||||
|
||||
```ts
|
||||
type Tokenizer = (spec: Spec) => Spec
|
||||
|
||||
interface Spec {
|
||||
tag: string;
|
||||
name: string;
|
||||
default?: string;
|
||||
type: string;
|
||||
optional: boolean;
|
||||
description: string;
|
||||
problems: Problem[];
|
||||
source: Line[];
|
||||
}
|
||||
```
|
||||
|
||||
The chain starts with a blank `Spec`, and each tokenizer fills in its piece using the `.source` input
|
||||
|
||||
## Stringifier
|
||||
|
||||
> One may also convert comment-parser JSON structures back into strings using the stringify method (stringify(o: (object|Array) [, opts: object]): string).
|
||||
> ...
|
||||
|
||||
Stringifier config follows the same strategy – a couple of common cases, and freeform formatter as a fallback
|
||||
|
||||
**New Options:**
|
||||
|
||||
- `format: "none"` re-assembles the source with original spacing and delimiters preserved
|
||||
- `format: "align"` aligns tag, name, type, and descriptions into fixed-width columns
|
||||
- `format: (tokens: Tokens) => string[]` do what you like, resulting lines will be concatenated into the output. Despite the simple interface, this can be turned into a complex stateful formatter, see `"align"` implementation in [transforms/align.ts](./src/transforms/align.ts)
|
||||
|
||||
## Stream
|
||||
|
||||
Work in progress
|
||||
+93
@@ -0,0 +1,93 @@
|
||||
{
|
||||
"name": "comment-parser",
|
||||
"version": "1.4.6",
|
||||
"description": "Generic JSDoc-like comment parser",
|
||||
"type": "module",
|
||||
"main": "lib/index.cjs",
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"exports": {
|
||||
".": {
|
||||
"import": "./es6/index.js",
|
||||
"require": "./lib/index.cjs"
|
||||
},
|
||||
"./primitives": {
|
||||
"import": "./es6/primitives.js",
|
||||
"require": "./lib/primitives.cjs"
|
||||
},
|
||||
"./util": {
|
||||
"import": "./es6/util.js",
|
||||
"require": "./lib/util.cjs"
|
||||
},
|
||||
"./parser/*": {
|
||||
"import": "./es6/parser/*.js",
|
||||
"require": "./lib/parser/*.cjs"
|
||||
},
|
||||
"./stringifier/*": {
|
||||
"import": "./es6/stringifier/*.js",
|
||||
"require": "./lib/stringifier/*.cjs"
|
||||
},
|
||||
"./transforms/*": {
|
||||
"import": "./es6/transforms/*.js",
|
||||
"require": "./lib/transforms/*.cjs"
|
||||
}
|
||||
},
|
||||
"types": "lib/index.d.ts",
|
||||
"directories": {
|
||||
"test": "tests"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@changesets/cli": "^2.29.8",
|
||||
"@types/jest": "^30.0.0",
|
||||
"convert-extension": "^0.3.0",
|
||||
"jest": "^30.1.3",
|
||||
"prettier": "3.6.2",
|
||||
"rimraf": "^6.0.1",
|
||||
"rollup": "^4.52.0",
|
||||
"ts-jest": "^29.4.4",
|
||||
"typescript": "^5.9.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 12.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "rimraf lib es6 browser; tsc -p tsconfig.json && tsc -p tsconfig.node.json && rollup -o browser/index.js -f iife --context window -n CommentParser es6/index.js && convert-extension cjs lib/",
|
||||
"format": "prettier --write src tests",
|
||||
"pretest": "rimraf coverage; npm run build",
|
||||
"test": "prettier --check src tests && jest --verbose",
|
||||
"preversion": "npm run build",
|
||||
"prepare": "npm run build",
|
||||
"prepublishOnly": "npm run build",
|
||||
"release:add": "changeset",
|
||||
"release:version": "git pull origin main && changeset version && npm install && git add . && git commit -m \"release v$(node -p \"require('./package.json').version\")\"",
|
||||
"release:publish": "git pull origin main && changeset publish && git push --follow-tags"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+ssh://git@github.com/yavorskiy/comment-parser.git"
|
||||
},
|
||||
"keywords": [
|
||||
"jsdoc",
|
||||
"comments",
|
||||
"parser"
|
||||
],
|
||||
"author": "Sergiy Yavorsky <sergiy@yavorsky.me> (https://github.com/syavorsky)",
|
||||
"contributors": [
|
||||
"Alex Grozav (https://github.com/alexgrozav)",
|
||||
"Alexej Yaroshevich (https://github.com/zxqfox)",
|
||||
"Andre Wachsmuth (https://github.com/blutorange)",
|
||||
"Brett Zamir (https://github.com/brettz9)",
|
||||
"Dieter Oberkofler (https://github.com/doberkofler)",
|
||||
"Evgeny Reznichenko (https://github.com/zxcabs)",
|
||||
"Javier \"Ciberma\" Mora (https://github.com/jhm-ciberman)",
|
||||
"Jayden Seric (https://github.com/jaydenseric)",
|
||||
"Jordan Harband (https://github.com/ljharb)",
|
||||
"tengattack (https://github.com/tengattack)"
|
||||
],
|
||||
"license": "MIT",
|
||||
"bugs": {
|
||||
"url": "https://github.com/syavorsky/comment-parser/issues"
|
||||
},
|
||||
"homepage": "https://github.com/syavorsky/comment-parser"
|
||||
}
|
||||
+36
@@ -0,0 +1,36 @@
|
||||
import getParser, { Options as ParserOptions } from './parser/index.js';
|
||||
import descriptionTokenizer from './parser/tokenizers/description.js';
|
||||
import nameTokenizer from './parser/tokenizers/name.js';
|
||||
import tagTokenizer from './parser/tokenizers/tag.js';
|
||||
import typeTokenizer from './parser/tokenizers/type.js';
|
||||
import getStringifier from './stringifier/index.js';
|
||||
import alignTransform from './transforms/align.js';
|
||||
import indentTransform from './transforms/indent.js';
|
||||
import crlfTransform from './transforms/crlf.js';
|
||||
import { flow as flowTransform } from './transforms/index.js';
|
||||
import { rewireSpecs, rewireSource, seedBlock, seedTokens } from './util.js';
|
||||
|
||||
export * from './primitives.js';
|
||||
|
||||
export function parse(source: string, options: Partial<ParserOptions> = {}) {
|
||||
return getParser(options)(source);
|
||||
}
|
||||
|
||||
// Default stringifier with out-of-the-box formatting.
export const stringify = getStringifier();
export { default as inspect } from './stringifier/inspect.js';

// Built-in block transforms, composable via `flow`.
export const transforms = {
  flow: flowTransform,
  align: alignTransform,
  indent: indentTransform,
  crlf: crlfTransform,
};

// Built-in tokenizers for assembling a custom spec-parsing chain.
export const tokenizers = {
  tag: tagTokenizer,
  type: typeTokenizer,
  name: nameTokenizer,
  description: descriptionTokenizer,
};

// Low-level helpers for seeding blocks/specs and fixing source references.
export const util = { rewireSpecs, rewireSource, seedBlock, seedTokens };
|
||||
+60
@@ -0,0 +1,60 @@
|
||||
import { Line } from '../primitives.js';

// Matches a tag at the start of the description, e.g. "@param". A "/" is
// excluded from the tag name so slash-containing strings such as
// "@npm/package" are not detected as tags; the lookahead requires the tag
// to be followed by whitespace or the end of the line.
const reTag = /^@[^\s/]+(?=\s|$)/;

/**
 * Groups source lines in sections representing tags.
 * First section is a block description if present. Last section captures lines starting with
 * the last tag to the end of the block, including dangling closing marker.
 * @param {Line[]} block source lines making a single comment block
 */
export type Parser = (block: Line[]) => Line[][];

/**
 * Predicate telling if string contains opening/closing escaping sequence
 * @param {string} source raw source line
 */
export type Fencer = (source: string) => boolean;

/**
 * `Parser` configuration options
 */
export interface Options {
  // escaping sequence (e.g. "```") or a custom predicate
  fence: string | Fencer;
}
|
||||
|
||||
/**
|
||||
* Creates configured `Parser`
|
||||
* @param {Partial<Options>} options
|
||||
*/
|
||||
export default function getParser({
|
||||
fence = '```',
|
||||
}: Partial<Options> = {}): Parser {
|
||||
const fencer = getFencer(fence);
|
||||
const toggleFence = (source: string, isFenced: boolean): boolean =>
|
||||
fencer(source) ? !isFenced : isFenced;
|
||||
|
||||
return function parseBlock(source: Line[]): Line[][] {
|
||||
// start with description section
|
||||
const sections: Line[][] = [[]];
|
||||
|
||||
let isFenced = false;
|
||||
for (const line of source) {
|
||||
if (reTag.test(line.tokens.description) && !isFenced) {
|
||||
sections.push([line]);
|
||||
} else {
|
||||
sections[sections.length - 1].push(line);
|
||||
}
|
||||
isFenced = toggleFence(line.tokens.description, isFenced);
|
||||
}
|
||||
|
||||
return sections;
|
||||
};
|
||||
}
|
||||
|
||||
function getFencer(fence: string | Fencer): Fencer {
|
||||
if (typeof fence === 'string')
|
||||
return (source: string) => source.split(fence).length % 2 === 0;
|
||||
return fence;
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user