Dataset columns (name, type, range of observed lengths):

  prefix        string   lengths 82 to 32.6k
  middle        string   lengths 5 to 470
  suffix        string   lengths 0 to 81.2k
  file_path     string   lengths 6 to 168
  repo_name     string   lengths 16 to 77
  context       list     lengths 5 to 5
  lang          string   4 classes
  ground_truth  string   lengths 5 to 470
import { isContextNode, type ContextNode, type Edge, type Node, type Style, } from "../types"; import { add, difference, multiply, normalize, rotate, splitName, } from "../utils"; import { SvgGraphics } from "./SvgGraphics"; import { Graphics, Path, Renderable, SvgAttrs } from "./types"; const CTX_STROKE = "#aaaaaa"; const NOTE_STROKE = "#555555"; const ARROW_SIZE = 1.5; const pickFontSize = (words: string[], w: number) => { const max = words .map((word) => word.length) .sort((a, b) => b - a) .at(0)!; return Math.floor(Math.min(Math.max(Math.ceil(w / max), 8), 24)); }; const sizeText = ( text: string[], w: number, h: number ): { lines: string[]; fontSize: number } => { let fontSize = pickFontSize(text, w); while (fontSize > 5) { const maxWidth = Math.ceil(w / fontSize) - 1; const maxHeight = Math.floor(h / fontSize) - 1; const lines: string[] = []; let line = text[0]; let n = 1; while (n < text.length) { const word = text[n++]; if (line.length + word.length >= maxWidth) { lines.push(line); line = word; } else line = line.concat(line.length ? " " : "", word); } lines.push(line); if (n === text.length && lines.length <= maxHeight) return { lines, fontSize, }; fontSize--; } return { lines: [text.join(" ")], fontSize, }; }; const renderText = ( text: string[], w: number, h: number, g: Graphics, options: { fit: boolean; x?: number; y?: number; w?: number; h?: number; fontSize?: number; } = { fit: true } ) => { const width = options.w || w || 0; const height = options.h || h || 0; const { lines, fontSize } = options.fit ? sizeText(text, width, height) : { lines: text, fontSize: options.fontSize || 12, }; g.attr("font-size", fontSize + "pt"); const x = options.x || Math.floor(width / 2); const y = options.y || Math.floor(height / 2); const m = Math.floor(lines.length / 2); const o = lines.length % 2 ? 0.3 : 0.9; lines.forEach((line, i) => { g.text(line, x, y, { fill: NOTE_STROKE, dy: `${((i - m) * 1.2 + o).toFixed(2)}em`, }); }); }; const getPath = (edge: Edge): Path[] => { if (edge.path) { const path = edge.path!.slice(1, -1); const endDir = normalize(difference(path[path.length - 2], path.at(-1)!)); const end = path.length - 1; const copy = path.map((p) => ({ x: p.x, y: p.y })); copy[end] = add( copy[end], multiply(endDir, ARROW_SIZE * (edge.arrow ? 5 : 0)) ); return copy; } const x1 = edge.source.x! + edge.source.width! / 2; const x2 = edge.target.x! - edge.target.width! / 2; const y1 = edge.source.y!; const y2 = edge.target.y!; if (y1 === y2) return [{ x: x1, y: y1 }, { dx: x2 - x1 }]; const dx = Math.floor((x2 - x1) / 2); const dy = Math.floor(y2 - y1); return [{ x: x1, y: y1 }, { dx }, { dy }, { dx }]; }; const renderEdge = (edge: Edge, g: Graphics) => { const attrs: SvgAttrs = { fill: "none", stroke: edge.arrow ? 
edge.color : edge.target.color, }; edge.arrow && (attrs["stroke-width"] = 3); g.path(getPath(edge), false, { ...attrs }); if (edge.arrow) { const end = edge.path![edge.path!.length - 2]; const path = edge.path!.slice(1, -1); const dir = normalize(difference(path[path.length - 2], path.at(-1)!)); const x = (s: number) => add(end, multiply(dir, s * ARROW_SIZE)); const y = (s: number) => multiply(rotate(dir), s * ARROW_SIZE); g.path([add(x(10), y(4)), x(5), add(x(10), y(-4)), end], true, { ...attrs, fill: edge.color, }); } }; const renderSimpleRef = ( node: Node, x: number, y: number, w: number, h: number, g: Graphics ) => { g.group("", "").attr("fill", node.color); g.rect(x, y, w, h); renderText(splitName(node.name), w, h, g, { fit: true, x: x + w / 2, y: y + h / 2, w, h, }); g.ungroup(); }; const renderRef = ( node: Node, x: number, y: number, w: number, h: number, g: Graphics ) => { renderSimpleRef(node, x, y, w, h, g); const hw = Math.ceil(w / 2); const hh = Math.ceil(h / 2); node.refs && [...node.refs].forEach((target, i) => renderSimpleRef(target, x - hw + 4, y + i * (hh + 2) - 4, hw, hh, g) ); }; const renderMultilineRef = ( targets: Node[], x: number, y: number, w: number, h: number, g: Graphics ) => { const text = targets.map((target) => `- ${splitName(target.name).join(" ")}`); g.group("", "") .attr("fill", targets[0].color) .attr("text-align", "left") .attr("text-anchor", "start"); g.rect(x, y, w, h); renderText(text, w, h, g, { fit: true, x: x + 4, y: y + h / 2, w, h, }); g.ungroup(); }; const renderCommandRefs = ( node: Node, x: number, y: number, w: number, h: number, g: Graphics ) => { const targets = [...node.refs!]; const th = Math.floor(h / targets.length); targets.forEach((target, i) => renderRef(target, x, y + i * (th + 2), w, th, g) ); }; const renderRefs = (node: Node, g: Graphics, style: Style) => { if (node.refs && node.visual !== "actor") { const x = Math.floor(node.x! - node.width! / 2 - style.scale * 0.2); const y = Math.floor(node.y! + node.height! * 0.4); const w = Math.floor(style.scale); const h = Math.floor(style.scale / 2); node.refs?.size > 1 ? node.visual === "command" ? renderCommandRefs(node, x, y, w, h, g) : renderMultilineRef([...node.refs], x, y, w, h, g) : renderRef([...node.refs][0], x, y, w, h, g); } }; const context: Renderable = (ctx: Node, g: Graphics, style: Style) => { if (isContextNode(ctx)) { if (ctx.name) { const words = splitName(ctx.name); g.text(words.join(" "), 0, 0, { fill: CTX_STROKE, stroke: CTX_STROKE, dy: -style.fontSize, }); g.rect(0, 0, ctx.width!, ctx.height!, { rx: 25, ry: 25 }); } g.group(`n${ctx.index}`, "", { dx: style.padding, dy: style.padding }); if (ctx.name) g.attr("text-align", "center") .attr("text-anchor", "middle") .attr("stroke", NOTE_STROKE); ctx.edges.forEach( (e) => e.color && renderEdge({ ...e, source: ctx.nodes.get(e.source.name)! }, g) ); ctx.nodes.forEach((n) => n.color && renderNode(n, g, style)); ctx.nodes.forEach((n) => n.refs && renderRefs(n, g, style)); g.ungroup(); } }; const note: Renderable = (node: Node, g: Graphics) => { g.attr("fill", node.color!); g.rect(0, 0, node.width!, node.height!); if (node.rels) g.attr( "class", node.visual.concat(" ", [...node.rels].map((i) => `n${i}`).join(" ")) ); renderText(splitName(node.name), node.width!, node.height!, g); const schema = node.ctx.schemas.get(node.name); schema && g.text(`{${schema.size}}`, node.width! - 6, 6, { "font-size": "6pt", fill: NOTE_STROKE, }); }; const renderNode = (node: Node, g: Graphics, style: Style) => { const dx = Math.floor(node.x! 
- node.width! / 2); const dy = Math.floor(node.y! - node.height! / 2); const render = node.visual === "context" ? context : node.visual !== "actor" ? note : undefined; // don't render actors as nodes if (render) { g.group(`n${node.index}`, node.name, { class: node.visual, dx, dy }); render(node, g, style); g.ungroup(); } }; export const render = (root: ContextNode, style: Style): string => { const
g = new SvgGraphics({
fill: style.fill, "font-family": style.font, "font-size": style.fontSize + "pt", "text-align": "left", stroke: style.stroke, "stroke-width": 1, }); context(root, g, style); return g.serialize(); };
src/graphics/render.ts
Rotorsoft-esml-85e903b
[ { "filename": "src/graphics/SvgGraphics.ts", "retrieved_chunk": " constructor(attrs: SvgAttrs) {\n this.root = this.current = new SvgElement(\"g\", {\n ...attrs,\n \"data-name\": \"root\",\n });\n }\n group(\n id: string,\n name: string,\n attrs?: { class?: string; dx?: number; dy?: number }", "score": 0.8412830829620361 }, { "filename": "src/graphics/SvgGraphics.ts", "retrieved_chunk": " style?: string;\n rx?: number;\n ry?: number;\n }\n ) {\n this._new(\"rect\", { x, y, height, width, ...attrs });\n }\n path(path: Path[], close?: boolean, attrs?: SvgAttrs) {\n const d = path\n .map((p, i) =>", "score": 0.8368756771087646 }, { "filename": "src/graphics/layout.ts", "retrieved_chunk": " node.width = 0;\n node.height = 0;\n};\nexport const layout = (root: ContextNode, style: Style) => {\n function layouter(visual: Visual): Layouter {\n switch (visual) {\n case \"context\":\n return layoutContext as Layouter;\n case \"actor\":\n return actor;", "score": 0.826240599155426 }, { "filename": "src/graphics/layout.ts", "retrieved_chunk": "const rectangle: Layouter = (node: Node, style: Style) => {\n node.x = 0;\n node.y = 0;\n node.width = style.scale * 2;\n node.height = style.scale;\n};\n// don't render actors as nodes\nconst actor: Layouter = (node: Node) => {\n node.x = 0;\n node.y = 0;", "score": 0.8237751722335815 }, { "filename": "src/graphics/types.ts", "retrieved_chunk": " ry?: number;\n }\n ): void;\n path(path: Path[], close?: boolean, attrs?: SvgAttrs): void;\n serialize(): string;\n}\nexport type Renderable = (node: Node, g: Graphics, style: Style) => void;", "score": 0.8178485631942749 } ]
typescript
g = new SvgGraphics({
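In every row shown here, ground_truth repeats the middle field, and concatenating prefix, middle, and suffix appears to reassemble the file named by file_path (src/graphics/render.ts in the row above). A minimal sketch of that sanity check, reusing the hypothetical DatasetRow type sketched earlier:

```typescript
// Hypothetical sanity checks for one row; assumes the DatasetRow shape sketched above.
function reassemble(row: DatasetRow): string {
  // The full source file is the prefix, the reference middle, and the suffix in order.
  return row.prefix + row.ground_truth + row.suffix;
}

function middleMatchesGroundTruth(row: DatasetRow): boolean {
  return row.middle === row.ground_truth;
}
```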
import { isContextNode, type ContextNode, type Edge, type Node, type Style, } from "../types"; import { add, difference, multiply, normalize, rotate, splitName, } from "../utils"; import { SvgGraphics } from "./SvgGraphics"; import { Graphics, Path, Renderable, SvgAttrs } from "./types"; const CTX_STROKE = "#aaaaaa"; const NOTE_STROKE = "#555555"; const ARROW_SIZE = 1.5; const pickFontSize = (words: string[], w: number) => { const max = words .map((word) => word.length) .sort((a, b) => b - a) .at(0)!; return Math.floor(Math.min(Math.max(Math.ceil(w / max), 8), 24)); }; const sizeText = ( text: string[], w: number, h: number ): { lines: string[]; fontSize: number } => { let fontSize = pickFontSize(text, w); while (fontSize > 5) { const maxWidth = Math.ceil(w / fontSize) - 1; const maxHeight = Math.floor(h / fontSize) - 1; const lines: string[] = []; let line = text[0]; let n = 1; while (n < text.length) { const word = text[n++]; if (line.length + word.length >= maxWidth) { lines.push(line); line = word; } else line = line.concat(line.length ? " " : "", word); } lines.push(line); if (n === text.length && lines.length <= maxHeight) return { lines, fontSize, }; fontSize--; } return { lines: [text.join(" ")], fontSize, }; }; const renderText = ( text: string[], w: number, h: number, g: Graphics, options: { fit: boolean; x?: number; y?: number; w?: number; h?: number; fontSize?: number; } = { fit: true } ) => { const width = options.w || w || 0; const height = options.h || h || 0; const { lines, fontSize } = options.fit ? sizeText(text, width, height) : { lines: text, fontSize: options.fontSize || 12, }; g.attr("font-size", fontSize + "pt"); const x = options.x || Math.floor(width / 2); const y = options.y || Math.floor(height / 2); const m = Math.floor(lines.length / 2); const o = lines.length % 2 ? 0.3 : 0.9; lines.forEach((line, i) => { g.text(line, x, y, { fill: NOTE_STROKE, dy: `${((i - m) * 1.2 + o).toFixed(2)}em`, }); }); };
const getPath = (edge: Edge): Path[] => {
if (edge.path) { const path = edge.path!.slice(1, -1); const endDir = normalize(difference(path[path.length - 2], path.at(-1)!)); const end = path.length - 1; const copy = path.map((p) => ({ x: p.x, y: p.y })); copy[end] = add( copy[end], multiply(endDir, ARROW_SIZE * (edge.arrow ? 5 : 0)) ); return copy; } const x1 = edge.source.x! + edge.source.width! / 2; const x2 = edge.target.x! - edge.target.width! / 2; const y1 = edge.source.y!; const y2 = edge.target.y!; if (y1 === y2) return [{ x: x1, y: y1 }, { dx: x2 - x1 }]; const dx = Math.floor((x2 - x1) / 2); const dy = Math.floor(y2 - y1); return [{ x: x1, y: y1 }, { dx }, { dy }, { dx }]; }; const renderEdge = (edge: Edge, g: Graphics) => { const attrs: SvgAttrs = { fill: "none", stroke: edge.arrow ? edge.color : edge.target.color, }; edge.arrow && (attrs["stroke-width"] = 3); g.path(getPath(edge), false, { ...attrs }); if (edge.arrow) { const end = edge.path![edge.path!.length - 2]; const path = edge.path!.slice(1, -1); const dir = normalize(difference(path[path.length - 2], path.at(-1)!)); const x = (s: number) => add(end, multiply(dir, s * ARROW_SIZE)); const y = (s: number) => multiply(rotate(dir), s * ARROW_SIZE); g.path([add(x(10), y(4)), x(5), add(x(10), y(-4)), end], true, { ...attrs, fill: edge.color, }); } }; const renderSimpleRef = ( node: Node, x: number, y: number, w: number, h: number, g: Graphics ) => { g.group("", "").attr("fill", node.color); g.rect(x, y, w, h); renderText(splitName(node.name), w, h, g, { fit: true, x: x + w / 2, y: y + h / 2, w, h, }); g.ungroup(); }; const renderRef = ( node: Node, x: number, y: number, w: number, h: number, g: Graphics ) => { renderSimpleRef(node, x, y, w, h, g); const hw = Math.ceil(w / 2); const hh = Math.ceil(h / 2); node.refs && [...node.refs].forEach((target, i) => renderSimpleRef(target, x - hw + 4, y + i * (hh + 2) - 4, hw, hh, g) ); }; const renderMultilineRef = ( targets: Node[], x: number, y: number, w: number, h: number, g: Graphics ) => { const text = targets.map((target) => `- ${splitName(target.name).join(" ")}`); g.group("", "") .attr("fill", targets[0].color) .attr("text-align", "left") .attr("text-anchor", "start"); g.rect(x, y, w, h); renderText(text, w, h, g, { fit: true, x: x + 4, y: y + h / 2, w, h, }); g.ungroup(); }; const renderCommandRefs = ( node: Node, x: number, y: number, w: number, h: number, g: Graphics ) => { const targets = [...node.refs!]; const th = Math.floor(h / targets.length); targets.forEach((target, i) => renderRef(target, x, y + i * (th + 2), w, th, g) ); }; const renderRefs = (node: Node, g: Graphics, style: Style) => { if (node.refs && node.visual !== "actor") { const x = Math.floor(node.x! - node.width! / 2 - style.scale * 0.2); const y = Math.floor(node.y! + node.height! * 0.4); const w = Math.floor(style.scale); const h = Math.floor(style.scale / 2); node.refs?.size > 1 ? node.visual === "command" ? 
renderCommandRefs(node, x, y, w, h, g) : renderMultilineRef([...node.refs], x, y, w, h, g) : renderRef([...node.refs][0], x, y, w, h, g); } }; const context: Renderable = (ctx: Node, g: Graphics, style: Style) => { if (isContextNode(ctx)) { if (ctx.name) { const words = splitName(ctx.name); g.text(words.join(" "), 0, 0, { fill: CTX_STROKE, stroke: CTX_STROKE, dy: -style.fontSize, }); g.rect(0, 0, ctx.width!, ctx.height!, { rx: 25, ry: 25 }); } g.group(`n${ctx.index}`, "", { dx: style.padding, dy: style.padding }); if (ctx.name) g.attr("text-align", "center") .attr("text-anchor", "middle") .attr("stroke", NOTE_STROKE); ctx.edges.forEach( (e) => e.color && renderEdge({ ...e, source: ctx.nodes.get(e.source.name)! }, g) ); ctx.nodes.forEach((n) => n.color && renderNode(n, g, style)); ctx.nodes.forEach((n) => n.refs && renderRefs(n, g, style)); g.ungroup(); } }; const note: Renderable = (node: Node, g: Graphics) => { g.attr("fill", node.color!); g.rect(0, 0, node.width!, node.height!); if (node.rels) g.attr( "class", node.visual.concat(" ", [...node.rels].map((i) => `n${i}`).join(" ")) ); renderText(splitName(node.name), node.width!, node.height!, g); const schema = node.ctx.schemas.get(node.name); schema && g.text(`{${schema.size}}`, node.width! - 6, 6, { "font-size": "6pt", fill: NOTE_STROKE, }); }; const renderNode = (node: Node, g: Graphics, style: Style) => { const dx = Math.floor(node.x! - node.width! / 2); const dy = Math.floor(node.y! - node.height! / 2); const render = node.visual === "context" ? context : node.visual !== "actor" ? note : undefined; // don't render actors as nodes if (render) { g.group(`n${node.index}`, node.name, { class: node.visual, dx, dy }); render(node, g, style); g.ungroup(); } }; export const render = (root: ContextNode, style: Style): string => { const g = new SvgGraphics({ fill: style.fill, "font-family": style.font, "font-size": style.fontSize + "pt", "text-align": "left", stroke: style.stroke, "stroke-width": 1, }); context(root, g, style); return g.serialize(); };
src/graphics/render.ts
Rotorsoft-esml-85e903b
[ { "filename": "src/graphics/layout.ts", "retrieved_chunk": " const ne = ctx.edges.get(e.name!)!;\n ne.path = [ne.source, ...ge.points!, ne.target].map((n) => ({\n x: Math.floor(n.x!),\n y: Math.floor(n.y!),\n }));\n });\n const { width = 0, height = 0 } = graph.graph();\n ctx.width = width + PAD;\n ctx.height = height + PAD;\n } else {", "score": 0.8397263884544373 }, { "filename": "src/canvas.ts", "retrieved_chunk": " this.fitZoom(this.zoom + z);\n this.transform();\n }\n private fitZoom(z: number) {\n this.zoom = Math.round(Math.min(Math.max(0.1, z), 3) * 100) / 100;\n }\n private transform(dx = 0, dy = 0) {\n const g = this.svg.children[0];\n if (g) {\n this.x = Math.floor(", "score": 0.8381598591804504 }, { "filename": "src/graphics/SvgGraphics.ts", "retrieved_chunk": " style?: string;\n rx?: number;\n ry?: number;\n }\n ) {\n this._new(\"rect\", { x, y, height, width, ...attrs });\n }\n path(path: Path[], close?: boolean, attrs?: SvgAttrs) {\n const d = path\n .map((p, i) =>", "score": 0.8170057535171509 }, { "filename": "src/graphics/SvgGraphics.ts", "retrieved_chunk": " this._new(\"path\", { ...attrs, d });\n }\n text(\n text: string,\n x: number,\n y: number,\n attrs?: {\n fill?: string;\n stroke?: string;\n dy?: number | string;", "score": 0.7925804853439331 }, { "filename": "src/esml.ts", "retrieved_chunk": " error?: Error;\n svg?: string;\n width?: number;\n height?: number;\n nodes?: Node[];\n} => {\n const style: Style = {\n scale,\n stroke: \"#dedede\",\n fill: \"white\",", "score": 0.790642499923706 } ]
typescript
const getPath = (edge: Edge): Path[] => {
import { BlockNode } from './index'; import { createBlockNodeName, createDataKey } from './types'; import { BlockTune, createBlockTuneName } from '../BlockTune'; import { TextNode } from '../TextNode'; import { ValueNode } from '../ValueNode'; import type { EditorDocument } from '../EditorDocument'; import type { BlockTuneConstructorParameters } from '../BlockTune/types'; import type { TextNodeConstructorParameters } from '../TextNode'; import type { ValueNodeConstructorParameters } from '../ValueNode'; describe('BlockNode', () => { describe('.serialized', () => { beforeEach(() => { jest.mock('../BlockTune', () => ({ BlockTune: jest.fn().mockImplementation(() => ({}) as BlockTune), serialized: jest.fn(), })); jest.mock('../TextNode', () => ({ TextNode: jest.fn().mockImplementation(() => ({}) as TextNode), serialized: jest.fn(), })); jest.mock('../ValueNode', () => ({ ValueNode: jest.fn().mockImplementation(() => ({}) as ValueNode), serialized: jest.fn(), })); }); afterEach(() => { jest.clearAllMocks(); }); it('should return a name of a tool that created a BlockNode', () => { const blockNodeName = createBlockNodeName('paragraph'); const blockNode = new BlockNode({ name: blockNodeName, data: {}, parent: {} as EditorDocument, }); const serialized = blockNode.serialized; expect(serialized.name).toEqual(blockNodeName); }); it('should call .serialized getter of all tunes associated with the BlockNode', () => { const blockTunesNames = [ createBlockTuneName('align'), createBlockTuneName('font-size'), createBlockTuneName('font-weight'), ]; const blockTunes = blockTunesNames.reduce((acc, name) => ({ ...acc,
[name]: new BlockTune({} as BlockTuneConstructorParameters), }), {});
const spyArray = Object .values(blockTunes) .map((blockTune) => { return jest.spyOn(blockTune as BlockTune, 'serialized', 'get'); }); const blockNode = new BlockNode({ name: createBlockNodeName('paragraph'), data: {}, parent: {} as EditorDocument, tunes: blockTunes, }); blockNode.serialized; spyArray.forEach((spy) => { expect(spy).toHaveBeenCalled(); }); }); it('should call .serialized getter of all child ValueNodes associated with the BlockNode', () => { const countOfValueNodes = 2; const valueNodes = [ ...Array(countOfValueNodes).keys() ] .reduce((acc, index) => ({ ...acc, [createDataKey(`data-key-${index}c${index}d`)]: new ValueNode({} as ValueNodeConstructorParameters), }), {}); const spyArray = Object .values(valueNodes) .map((valueNode) => { return jest.spyOn(valueNode as ValueNode, 'serialized', 'get'); }); const blockNode = new BlockNode({ name: createBlockNodeName('paragraph'), data: { ...valueNodes, }, parent: {} as EditorDocument, }); blockNode.serialized; spyArray.forEach((spy) => { expect(spy).toHaveBeenCalled(); }); }); it('should call .serialized getter of all child TextNodes associated with the BlockNode', () => { const countOfTextNodes = 3; const textNodes = [ ...Array(countOfTextNodes).keys() ] .map(() => new TextNode({} as TextNodeConstructorParameters)); const spyArray = textNodes .map((textNode) => { return jest.spyOn(textNode, 'serialized', 'get'); }); const blockNode = new BlockNode({ name: createBlockNodeName('paragraph'), data: { [createDataKey('data-key-1a2b')]: textNodes, }, parent: {} as EditorDocument, }); blockNode.serialized; spyArray.forEach((spy) => { expect(spy).toHaveBeenCalled(); }); }); }); });
src/entities/BlockNode/BlockNode.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/utils/mocks/createBlockTuneMock.ts", "retrieved_chunk": " data?: Record<string, unknown>,\n}): BlockTune {\n return new BlockTune(\n {\n name: name || createBlockTuneName('aligning'),\n data: data || {},\n }\n );\n}", "score": 0.8377156257629395 }, { "filename": "src/utils/mocks/createBlockNodeMock.ts", "retrieved_chunk": " */\nexport function createBlockNodeMock({ name, parent, tunes, data }: { name?: BlockNodeName, parent: EditorDocument, data?: BlockNodeData, tunes?: Record<BlockTuneName, BlockTune> }): BlockNode {\n return new BlockNode({\n name: name || createBlockNodeName('header'),\n parent,\n data: data || {},\n tunes: tunes || {},\n });\n}", "score": 0.826280951499939 }, { "filename": "src/entities/BlockTune/BlockTune.spec.ts", "retrieved_chunk": " });\n describe('.serialized', () => {\n it('should return serialized version of the BlockTune', () => {\n // Arrange\n const tune = createBlockTuneMock({\n name: createBlockTuneName('styling'),\n data: {\n background: 'transparent',\n },\n });", "score": 0.8216809034347534 }, { "filename": "src/entities/BlockNode/index.ts", "retrieved_chunk": " .entries(this.#tunes)\n .reduce(\n (acc, [name, tune]) => {\n acc[name] = tune.serialized;\n return acc;\n },\n {} as Record<string, BlockTuneSerialized>\n );\n return {\n name: this.#name,", "score": 0.8130174875259399 }, { "filename": "src/utils/mocks/createBlockTuneMock.ts", "retrieved_chunk": "import { BlockTune, BlockTuneName, createBlockTuneName } from '../../entities/BlockTune';\n/**\n * Creates a BlockTune instance with the given name and data.\n *\n * @param args - BlockTune constructor arguments.\n * @param args.name - The name of the tune.\n * @param args.data - Any additional data associated with the tune.\n */\nexport function createBlockTuneMock({ name, data }: {\n name?: BlockTuneName,", "score": 0.7877755761146545 } ]
typescript
[name]: new BlockTune({} as BlockTuneConstructorParameters), }), {});
import { FormattingNodeConstructorParameters, InlineToolName, InlineToolData } from './types'; import { ChildNode, InlineFragment, InlineNode, InlineNodeSerialized, ParentNode } from '../interfaces'; export * from './types'; /** * We need to extend FormattingNode interface with ChildNode and ParentNode ones to use the methods from mixins */ export interface FormattingNode extends ChildNode, ParentNode {} /** * FormattingNode class represents a node in a tree-like structure, used to store and manipulate formatted text content */ @ParentNode @ChildNode export class FormattingNode implements InlineNode { /** * Private field representing the name of the formatting tool applied to the content */ #tool: InlineToolName; /** * Any additional data associated with the formatting tool */ #data?: InlineToolData; /** * Constructor for FormattingNode class. * * @param args - FormattingNode constructor arguments. * @param args.tool - The name of the formatting tool applied to the content. * @param args.data - Any additional data associated with the formatting. */ constructor({ tool, data }: FormattingNodeConstructorParameters) { this.#tool = tool; this.#data = data; } /** * Returns text value length of current node (including subtree) */ public get length(): number {
return this.children.reduce((sum, child) => sum + child.length, 0);
} /** * Returns serialized value of the node: text and formatting fragments */ public get serialized(): InlineNodeSerialized { return { text: this.getText(), fragments: this.getFragments(), }; } /** * Inserts text to the specified index, by default appends text to the end of the current value * * @param text - text to insert * @param [index] - char index where to insert text */ public insertText(text: string, index = this.length): void { const [child, offset] = this.#findChildByIndex(index); child?.insertText(text, index - offset); } /** * Removes text form the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value * @returns {string} removed text */ public removeText(start = 0, end = this.length): string { const result = this.#reduceChildrenInRange( start, end, (acc, child, childStart, childEnd) => { return acc + child.removeText(childStart, childEnd); }, '' ); if (this.length === 0) { this.remove(); } return result; } /** * Returns text from the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value */ public getText(start = 0, end = this.length): string { return this.#reduceChildrenInRange( start, end, (acc, child, childStart, childEnd) => { return acc + child.getText(childStart, childEnd); }, '' ); } /** * Returns inline fragments for subtree including current node from the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value */ public getFragments(start = 0, end = this.length): InlineFragment[] { return this.#reduceChildrenInRange<InlineFragment[]>( start, end, (acc, child, childStart, childEnd) => { /** * If child is not a FormattingNode, it doesn't include any fragments. So we skip it. 
*/ if (!(child instanceof FormattingNode)) { return acc; } acc.push(...child.getFragments(childStart, childEnd)); return acc; }, [ { tool: this.#tool, data: this.#data, range: [start, end], } ] ); } /** * Splits current node by the specified index * * @param index - char index where to split the node * @returns {FormattingNode | null} new node */ public split(index: number): FormattingNode | null { if (index === 0 || index === this.length) { return null; } const newNode = new FormattingNode({ tool: this.#tool, data: this.#data, }); const [child, offset] = this.#findChildByIndex(index); if (!child) { return null; } // Have to save length as it is changed after split const childLength = child.length; const splitNode = child.split(index - offset); let midNodeIndex = this.children.indexOf(child); /** * If node is split or if node is not split but index equals to child length, we should split children from the next node */ if (splitNode || (index - offset === childLength)) { midNodeIndex += 1; } newNode.append(...this.children.slice(midNodeIndex)); this.parent?.insertAfter(this, newNode); return newNode; } /** * Applies formatting to the text with specified inline tool in the specified range * * @param tool - name of inline tool to apply * @param start - char start index of the range * @param end - char end index of the range * @param [data] - inline tool data if applicable */ public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] { /** * In case current tool is the same as new one, do nothing */ if (tool === this.#tool) { return []; } return this.#reduceChildrenInRange<InlineNode[]>( start, end, (acc, child, childStart, childEnd) => { acc.push(...child.format(tool, childStart, childEnd, data)); return acc; }, [] ); } /** * Iterates through children in range and calls callback for each * * @param start - range start char index * @param end - range end char index * @param callback - callback to apply on children * @param initialValue - initial accumulator value * @private */ #reduceChildrenInRange<Acc>( start: number, end: number, callback: (acc: Acc, child: InlineNode, start: number, end: number) => Acc, initialValue: Acc ): Acc { let result = initialValue; for (const child of this.children) { if (start < child.length && end > 0 && start < end) { result = callback(result, child, Math.max(start, 0), Math.min(child.length, end)); } start -= child.length; end -= child.length; } return result; } /** * Returns child by passed text index * * @param index - char index * @private */ #findChildByIndex(index: number): [child: InlineNode & ChildNode | null, offset: number] { let totalLength = 0; for (const child of this.children) { if (index <= child.length + totalLength) { return [child, totalLength]; } totalLength += child.length; } return [null, totalLength]; } }
src/entities/FormattingNode/index.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/TextNode/index.ts", "retrieved_chunk": " constructor({ value = '' }: TextNodeConstructorParameters = {}) {\n this.#value = value;\n }\n /**\n * Returns length of the text\n */\n public get length(): number {\n return this.#value.length;\n }\n /**", "score": 0.9030526280403137 }, { "filename": "src/entities/EditorDocument/index.ts", "retrieved_chunk": " * @param args.properties - The properties of the document.\n */\n constructor({ children, properties }: EditorDocumentConstructorParameters) {\n this.#children = children;\n this.#properties = properties;\n }\n /**\n * Returns count of child BlockNodes of the EditorDocument.\n */\n public get length(): number {", "score": 0.8813037872314453 }, { "filename": "src/entities/interfaces/InlineNode.ts", "retrieved_chunk": "import { InlineToolData, InlineToolName } from '../FormattingNode';\n/**\n * Interface describing abstract InlineNode\n */\nexport interface InlineNode {\n /**\n * Text length of node and it's subtree\n */\n length: number;\n /**", "score": 0.8592257499694824 }, { "filename": "src/entities/TextNode/index.ts", "retrieved_chunk": " public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {\n this.#validateIndex(start);\n this.#validateIndex(end);\n const formattingNode = new FormattingNode({\n tool,\n data,\n });\n const fragments: ChildNode[] = [];\n /**\n * If start index is greater than 0, we need extract part of the text before the start index", "score": 0.8573377728462219 }, { "filename": "src/entities/BlockNode/index.ts", "retrieved_chunk": " */\n constructor({ name, data, parent, tunes = {} }: BlockNodeConstructorParameters) {\n this.#name = name;\n this.#data = data;\n this.#parent = parent;\n this.#tunes = tunes;\n }\n /**\n * Returns serialized object representing the BlockNode\n */", "score": 0.8500854969024658 } ]
typescript
return this.children.reduce((sum, child) => sum + child.length, 0);
import { createBlockTuneName } from './index'; import { createBlockTuneMock } from '../../utils/mocks/createBlockTuneMock'; describe('BlockTune', () => { describe('.update()', () => { it('should add field to data object by key if it doesn\'t exist', () => { // Arrange const blockTune = createBlockTuneMock({ data: {}, }); // Act blockTune.update('align', 'left'); // Assert expect(blockTune.serialized.data).toEqual({ align: 'left', }); }); it('should update field in data object by key', () => { // Arrange const blockTune = createBlockTuneMock({ data: { align: 'center', }, }); // Act blockTune.update('align', 'right'); // Assert expect(blockTune.serialized.data).toEqual({ align: 'right', }); }); }); describe('.serialized', () => { it('should return serialized version of the BlockTune', () => { // Arrange const tune = createBlockTuneMock({ name:
createBlockTuneName('styling'), data: {
background: 'transparent', }, }); // Act const tuneSerialized = tune.serialized; // Assert expect(tuneSerialized).toEqual( { name: 'styling', data: { background: 'transparent', }, } ); }); }); });
src/entities/BlockTune/BlockTune.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/BlockNode/BlockNode.spec.ts", "retrieved_chunk": " jest.mock('../ValueNode', () => ({\n ValueNode: jest.fn().mockImplementation(() => ({}) as ValueNode),\n serialized: jest.fn(),\n }));\n });\n afterEach(() => {\n jest.clearAllMocks();\n });\n it('should return a name of a tool that created a BlockNode', () => {\n const blockNodeName = createBlockNodeName('paragraph');", "score": 0.8883904218673706 }, { "filename": "src/entities/BlockNode/BlockNode.spec.ts", "retrieved_chunk": " const blockNode = new BlockNode({\n name: blockNodeName,\n data: {},\n parent: {} as EditorDocument,\n });\n const serialized = blockNode.serialized;\n expect(serialized.name).toEqual(blockNodeName);\n });\n it('should call .serialized getter of all tunes associated with the BlockNode', () => {\n const blockTunesNames = [", "score": 0.8809465169906616 }, { "filename": "src/entities/EditorDocument/EditorDocument.spec.ts", "retrieved_chunk": " });\n describe('.addBlock()', () => {\n it('should add the block to the end of the document if index is not provided', () => {\n // Arrange\n const document = createEditorDocumentMockWithSomeBlocks();\n const block = createBlockNodeMock({\n parent: document,\n });\n // Act\n document.addBlock(block);", "score": 0.8614003658294678 }, { "filename": "src/entities/interfaces/ParentNode.spec.ts", "retrieved_chunk": " it('should append passed children to new parent', () => {\n const childMock = createChildMock();\n dummy = new Dummy({\n children: [ childMock ],\n });\n expect(childMock.appendTo).toBeCalledWith(dummy);\n });\n });\n describe('.children', () => {\n it('should return empty array by default', () => {", "score": 0.8534095287322998 }, { "filename": "src/entities/BlockNode/BlockNode.spec.ts", "retrieved_chunk": " .map((blockTune) => {\n return jest.spyOn(blockTune as BlockTune, 'serialized', 'get');\n });\n const blockNode = new BlockNode({\n name: createBlockNodeName('paragraph'),\n data: {},\n parent: {} as EditorDocument,\n tunes: blockTunes,\n });\n blockNode.serialized;", "score": 0.8519625067710876 } ]
typescript
createBlockTuneName('styling'), data: {
import { describe, it, expect, beforeEach } from '@jest/globals'; import { TextNode } from './index'; import { createInlineToolName, FormattingNode } from '../FormattingNode'; import type { ParentNode } from '../interfaces'; describe('TextNode', () => { const initialText = 'initial text'; const text = 'some text'; const parentMock = { insertAfter: jest.fn(), removeChild: jest.fn(), append: jest.fn(), children: [], } as unknown as ParentNode; let node: TextNode; beforeEach(() => { node = new TextNode({ value: initialText, parent: parentMock as FormattingNode, }); }); it('should have empty value by default', () => { node = new TextNode(); expect(node.getText()).toEqual(''); }); describe('.insertText()', () => { it('should set text to value if node is empty', () => { node = new TextNode(); node.insertText(text); expect(node.getText()).toEqual(text); }); it('should append text if not empty', () => { node.insertText(text); expect(node.getText()).toEqual(initialText + text); }); it('should prepend text if index is 0 and node is not empty', () => { node.insertText(text, 0); expect(node.getText()).toEqual(text + initialText); }); it('should insert text at index if not empty', () => { const index = 5; node.insertText(text, index); expect(node.getText()).toEqual(initialText.slice(0, index) + text + initialText.slice(index)); }); it('should throw an error if index is less than 0', () => { const f = (): void => node.insertText(text, -1); expect(f).toThrowError(); }); it('should throw an error if index is greater than node length', () => { const f = (): void => node.insertText(text, initialText.length + 1); expect(f).toThrowError(); }); }); describe('.getText()', () => { it('should return sliced value if start provided', () => { const start = 5; expect(node.getText(start)).toEqual(initialText.slice(start)); }); it('should return sliced value if end provided', () => { const end = 6; expect(node.getText(0, end)).toEqual(initialText.slice(0, end)); }); it('should return sliced value if full range provided', () => { const start = 3; const end = 9; expect(node.getText(start, end)).toEqual(initialText.slice(start, end)); }); it('should throw an error if start is invalid index', () => { expect(() => node.getText(-1)).toThrowError(); expect(() => node.getText(initialText.length + 1)).toThrowError(); }); it('should throw an error if end is invalid index', () => { expect(() => node.getText(0, initialText.length + 1)).toThrowError(); }); it('should throw an error if end index is greater than start index', () => { const start = 5; const end = 3; expect(() => node.getText(start, end)).toThrowError(); }); it('should not throw an error if end index is equal to start index', () => { const start = 5; const end = 5; expect(() => node.getText(start, end)).not.toThrowError(); }); }); describe('.removeText()', () => { it('should remove all text by default', () => { node.removeText(); expect(node.getText()).toEqual(''); }); it('should remove text from specified index', () => { const start = 3; node.removeText(start); expect(node.getText()).toEqual(initialText.slice(0, start)); }); it('should remove text from 0 to specified end index', () => { const end = 8; node.removeText(0, end); expect(node.getText()).toEqual(initialText.slice(end)); }); it('should remove text from specified start and end indecies', () => { const start = 3; const end = 8; node.removeText(start, end); expect(node.getText()).toEqual(initialText.slice(0, start) + initialText.slice(end)); }); it('should call remove() method if node is empty after 
removeText() call', () => { jest.spyOn(node, 'remove'); node.removeText(); expect(node.remove).toBeCalled(); }); }); describe('.format()', () => { it('should return just one FormattingNode, if formatting full TextNode', () => {
const name = createInlineToolName('bold');
const fragments = node.format(name, 0, initialText.length); expect(fragments).toHaveLength(1); expect(fragments[0]).toBeInstanceOf(FormattingNode); }); it('should return two fragments if formatting from the start, but not to the end', () => { const name = createInlineToolName('bold'); const end = 5; const fragments = node.format(name, 0, end); expect(fragments).toHaveLength(2); expect(fragments[0]).toBeInstanceOf(FormattingNode); expect(fragments[1]).toBeInstanceOf(TextNode); }); it('should return two fragments if formatting to the end, but not from the start', () => { const name = createInlineToolName('bold'); const start = 5; const fragments = node.format(name, start, initialText.length); expect(fragments).toHaveLength(2); expect(fragments[0]).toBeInstanceOf(TextNode); expect(fragments[1]).toBeInstanceOf(FormattingNode); }); it('should return three fragments if formatting in the middle', () => { const name = createInlineToolName('bold'); const start = 5; const end = 8; const fragments = node.format(name, start, end); // eslint-disable-next-line @typescript-eslint/no-magic-numbers expect(fragments).toHaveLength(3); expect(fragments[0]).toBeInstanceOf(TextNode); expect(fragments[1]).toBeInstanceOf(FormattingNode); expect(fragments[2]).toBeInstanceOf(TextNode); }); it('should return FormattingNode with a TextNode as a child with correct text value', () => { const name = createInlineToolName('bold'); const start = 5; const end = 8; const fragments = node.format(name, start, end); const formattingNode = fragments[1] as FormattingNode; expect(formattingNode.children[0].getText()).toEqual(initialText.slice(start, end)); }); it('should call parent\'s insertAfter with new nodes', () => { const name = createInlineToolName('bold'); const start = 5; const end = 8; const fragments = node.format(name, start, end); expect(parentMock.insertAfter).toBeCalledWith(node, ...fragments); }); }); describe('.split()', () => { const index = 5; it('should not split (return null) if index is 0', () => { const newNode = node.split(0); expect(newNode).toBeNull(); }); it('should not split (return null) if index equals text length', () => { const newNode = node.split(initialText.length); expect(newNode).toBeNull(); }); it('should create new TextNode on split', () => { const newNode = node.split(index); expect(newNode).toBeInstanceOf(TextNode); }); it('should create new TextNode with text value splitted from the original one', () => { const newNode = node.split(index); expect(newNode?.getText()).toEqual(initialText.slice(index)); }); it('should remove split text value from the original node', () => { node.split(index); expect(node.getText()).toEqual(initialText.slice(0, index)); }); it('should insert new node to the parent', () => { const newNode = node.split(index); expect(parentMock.insertAfter).toBeCalledWith(node, newNode); }); }); describe('.serialized', () => { it('should return text value and empty array of fragments', () => { const result = node.serialized; expect(result).toEqual({ text: initialText, fragments: [], }); }); }); });
src/entities/TextNode/TextNode.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " const newNode = node.split(index);\n expect(parentMock.insertAfter).toBeCalledWith(node, newNode);\n });\n });\n describe('.format()', () => {\n const start = 3;\n const end = 5;\n it('should apply formatting to the relevant child', () => {\n node.format(anotherTool, start, end);\n expect(childMock.format).toBeCalledWith(anotherTool, start, end, undefined);", "score": 0.8723085522651672 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " });\n });\n describe('.removeText()', () => {\n const start = 3;\n const end = 5;\n it('should remove text from appropriate child', () => {\n node.removeText(start, end);\n expect(childMock.removeText).toBeCalledWith(start, end);\n });\n it('should adjust indices by child offset', () => {", "score": 0.8721542954444885 }, { "filename": "src/entities/BlockNode/BlockNode.spec.ts", "retrieved_chunk": " },\n parent: {} as EditorDocument,\n });\n blockNode.serialized;\n spyArray.forEach((spy) => {\n expect(spy).toHaveBeenCalled();\n });\n });\n it('should call .serialized getter of all child TextNodes associated with the BlockNode', () => {\n const countOfTextNodes = 3;", "score": 0.8709128499031067 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " it('should not split (return null) if index equals text length', () => {\n const newNode = node.split(node.length);\n expect(newNode).toBeNull();\n });\n it('should create new FormattingNode on split', () => {\n const newNode = node.split(index);\n expect(newNode).toBeInstanceOf(FormattingNode);\n });\n /**\n * @todo check this and related cases with integration tests", "score": 0.8628918528556824 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " */\n it('should create new FormattingNode with children split from the original one', () => {\n const newNode = node.split(childMock.length);\n expect(newNode?.children).toEqual([ anotherChildMock ]);\n });\n it('should call split method of child containing the specified index', () => {\n node.split(index);\n expect(childMock.split).toBeCalledWith(index);\n });\n it('should insert new node to the parent', () => {", "score": 0.8622616529464722 } ]
typescript
const name = createInlineToolName('bold');
import { BlockNode } from './index'; import { createBlockNodeName, createDataKey } from './types'; import { BlockTune, createBlockTuneName } from '../BlockTune'; import { TextNode } from '../TextNode'; import { ValueNode } from '../ValueNode'; import type { EditorDocument } from '../EditorDocument'; import type { BlockTuneConstructorParameters } from '../BlockTune/types'; import type { TextNodeConstructorParameters } from '../TextNode'; import type { ValueNodeConstructorParameters } from '../ValueNode'; describe('BlockNode', () => { describe('.serialized', () => { beforeEach(() => { jest.mock('../BlockTune', () => ({ BlockTune: jest.fn().mockImplementation(() => ({}) as BlockTune), serialized: jest.fn(), })); jest.mock('../TextNode', () => ({ TextNode: jest.fn().mockImplementation(() => ({}) as TextNode), serialized: jest.fn(), })); jest.mock('../ValueNode', () => ({ ValueNode: jest.fn().mockImplementation(() => ({}) as ValueNode), serialized: jest.fn(), })); }); afterEach(() => { jest.clearAllMocks(); }); it('should return a name of a tool that created a BlockNode', () => { const blockNodeName = createBlockNodeName('paragraph'); const blockNode = new BlockNode({ name: blockNodeName, data: {}, parent: {} as EditorDocument, }); const serialized = blockNode.serialized; expect(serialized.name).toEqual(blockNodeName); }); it('should call .serialized getter of all tunes associated with the BlockNode', () => { const blockTunesNames = [ createBlockTuneName('align'), createBlockTuneName('font-size'), createBlockTuneName('font-weight'), ]; const blockTunes = blockTunesNames.reduce((acc, name) => ({ ...acc, [name]: new BlockTune({} as
BlockTuneConstructorParameters), }), {});
const spyArray = Object .values(blockTunes) .map((blockTune) => { return jest.spyOn(blockTune as BlockTune, 'serialized', 'get'); }); const blockNode = new BlockNode({ name: createBlockNodeName('paragraph'), data: {}, parent: {} as EditorDocument, tunes: blockTunes, }); blockNode.serialized; spyArray.forEach((spy) => { expect(spy).toHaveBeenCalled(); }); }); it('should call .serialized getter of all child ValueNodes associated with the BlockNode', () => { const countOfValueNodes = 2; const valueNodes = [ ...Array(countOfValueNodes).keys() ] .reduce((acc, index) => ({ ...acc, [createDataKey(`data-key-${index}c${index}d`)]: new ValueNode({} as ValueNodeConstructorParameters), }), {}); const spyArray = Object .values(valueNodes) .map((valueNode) => { return jest.spyOn(valueNode as ValueNode, 'serialized', 'get'); }); const blockNode = new BlockNode({ name: createBlockNodeName('paragraph'), data: { ...valueNodes, }, parent: {} as EditorDocument, }); blockNode.serialized; spyArray.forEach((spy) => { expect(spy).toHaveBeenCalled(); }); }); it('should call .serialized getter of all child TextNodes associated with the BlockNode', () => { const countOfTextNodes = 3; const textNodes = [ ...Array(countOfTextNodes).keys() ] .map(() => new TextNode({} as TextNodeConstructorParameters)); const spyArray = textNodes .map((textNode) => { return jest.spyOn(textNode, 'serialized', 'get'); }); const blockNode = new BlockNode({ name: createBlockNodeName('paragraph'), data: { [createDataKey('data-key-1a2b')]: textNodes, }, parent: {} as EditorDocument, }); blockNode.serialized; spyArray.forEach((spy) => { expect(spy).toHaveBeenCalled(); }); }); }); });
src/entities/BlockNode/BlockNode.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/utils/mocks/createBlockTuneMock.ts", "retrieved_chunk": " data?: Record<string, unknown>,\n}): BlockTune {\n return new BlockTune(\n {\n name: name || createBlockTuneName('aligning'),\n data: data || {},\n }\n );\n}", "score": 0.8417555093765259 }, { "filename": "src/utils/mocks/createBlockNodeMock.ts", "retrieved_chunk": " */\nexport function createBlockNodeMock({ name, parent, tunes, data }: { name?: BlockNodeName, parent: EditorDocument, data?: BlockNodeData, tunes?: Record<BlockTuneName, BlockTune> }): BlockNode {\n return new BlockNode({\n name: name || createBlockNodeName('header'),\n parent,\n data: data || {},\n tunes: tunes || {},\n });\n}", "score": 0.8039686679840088 }, { "filename": "src/entities/BlockNode/index.ts", "retrieved_chunk": " .entries(this.#tunes)\n .reduce(\n (acc, [name, tune]) => {\n acc[name] = tune.serialized;\n return acc;\n },\n {} as Record<string, BlockTuneSerialized>\n );\n return {\n name: this.#name,", "score": 0.8032771348953247 }, { "filename": "src/entities/BlockTune/BlockTune.spec.ts", "retrieved_chunk": " });\n describe('.serialized', () => {\n it('should return serialized version of the BlockTune', () => {\n // Arrange\n const tune = createBlockTuneMock({\n name: createBlockTuneName('styling'),\n data: {\n background: 'transparent',\n },\n });", "score": 0.7813324332237244 }, { "filename": "src/utils/mocks/createBlockTuneMock.ts", "retrieved_chunk": "import { BlockTune, BlockTuneName, createBlockTuneName } from '../../entities/BlockTune';\n/**\n * Creates a BlockTune instance with the given name and data.\n *\n * @param args - BlockTune constructor arguments.\n * @param args.name - The name of the tune.\n * @param args.data - Any additional data associated with the tune.\n */\nexport function createBlockTuneMock({ name, data }: {\n name?: BlockTuneName,", "score": 0.7757768630981445 } ]
typescript
BlockTuneConstructorParameters), }), {});
import { EditorDocument } from './index'; import { BlockNode } from '../BlockNode'; import { createBlockNodeMock } from '../../utils/mocks/createBlockNodeMock'; import { createEditorDocumentMock } from '../../utils/mocks/createEditorDocumentMock'; /** * Creates an EditorDocument object with some blocks for tests. */ function createEditorDocumentMockWithSomeBlocks(): EditorDocument { const document = createEditorDocumentMock(); const countOfBlocks = 3; for (let i = 0; i < countOfBlocks; i++) { const block = createBlockNodeMock({ parent: document, }); document.addBlock(block); } return document; } describe('EditorDocument', () => { describe('.length', () => { it('should return the number of blocks in the document', () => { // Arrange const blocksCount = 3; const document = new EditorDocument({ children: [], properties: { readOnly: false, }, }); for (let i = 0; i < blocksCount; i++) { const block = createBlockNodeMock({ parent: document, }); document.addBlock(block); } // Act const actual = document.length; // Assert expect(actual).toBe(blocksCount); }); }); describe('.addBlock()', () => { it('should add the block to the end of the document if index is not provided', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); const block = createBlockNodeMock({ parent: document, }); // Act document.addBlock(block); // Assert const lastBlock = document.getBlock(document.length - 1); expect(lastBlock).toBe(block); }); it('should add the block to the beginning of the document if index is 0', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); const block = createBlockNodeMock({ parent: document, }); // Act document.addBlock(block, 0); // Assert expect(document.getBlock(0)).toBe(block); }); it('should add the block to the specified index in the middle of the document', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); const block = createBlockNodeMock({ parent: document, }); // Act document.addBlock(block, 1); // Assert expect(document.getBlock(1)).toBe(block); }); it('should add the block to the end of the document if the index after the last element is passed', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); const block = createBlockNodeMock({ parent: document, }); // Act document.addBlock(block, document.length); // Assert const lastBlock = document.getBlock(document.length - 1); expect(lastBlock).toBe(block); }); it('should throw an error if index is less then 0', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); const block = createBlockNodeMock({ parent: document, }); // Act const action = (): void => document.addBlock(block, -1); // Assert expect(action).toThrowError('Index out of bounds'); }); it('should throw an error if index is greater then document length', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); const block = createBlockNodeMock({ parent: document, }); // Act const action = (): void => document.addBlock(block, document.length + 1); // Assert expect(action).toThrowError('Index out of bounds'); }); }); describe('.removeBlock()', () => { it('should remove the block from the beginning of the document if index 0 is passed', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); const block = document.getBlock(0); // Act document.removeBlock(0); // Assert expect(document.getBlock(0)).not.toBe(block); }); it('should remove the block from the specified index in the middle of the document', () => { // 
Arrange const document = createEditorDocumentMockWithSomeBlocks(); const block = document.getBlock(1); // Act document.removeBlock(1); // Assert expect(document.getBlock(1)).not.toBe(block); }); it('should remove the block from the end of the document if the last index is passed', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); const documentLengthBeforeRemove = document.length; // Act document.removeBlock(document.length - 1); // Assert expect(document.length).toBe(documentLengthBeforeRemove - 1); }); it('should throw an error if index is less then 0', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); // Act const action = (): void => document.removeBlock(-1); // Assert expect(action).toThrowError('Index out of bounds'); }); it('should throw an error if index is greater then document length', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); // Act const action = (): void => document.removeBlock(document.length); // Assert expect(action).toThrowError('Index out of bounds'); }); }); describe('.getBlock()', () => { it('should return the block from the specific index', () => { // Arrange const document = createEditorDocumentMock(); const countOfBlocks = 3;
const blocks: BlockNode[] = [];
for (let i = 0; i < countOfBlocks; i++) { const block = createBlockNodeMock({ parent: document, }); document.addBlock(block); blocks.push(block); } const index = 1; // Act const block = document.getBlock(index); // Assert expect(block).toBe(blocks[index]); }); it('should throw an error if index is less then 0', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); // Act const action = (): BlockNode => document.getBlock(-1); // Assert expect(action).toThrowError('Index out of bounds'); }); it('should throw an error if index is greater then document length', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); // Act const action = (): BlockNode => document.getBlock(document.length); // Assert expect(action).toThrowError('Index out of bounds'); }); }); });
src/entities/EditorDocument/EditorDocument.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " },\n ]);\n });\n });\n describe('.split()', () => {\n const index = 5;\n it('should not split (return null) if index is 0', () => {\n const newNode = node.split(0);\n expect(newNode).toBeNull();\n });", "score": 0.8881029486656189 }, { "filename": "src/entities/BlockNode/BlockNode.spec.ts", "retrieved_chunk": " },\n parent: {} as EditorDocument,\n });\n blockNode.serialized;\n spyArray.forEach((spy) => {\n expect(spy).toHaveBeenCalled();\n });\n });\n it('should call .serialized getter of all child TextNodes associated with the BlockNode', () => {\n const countOfTextNodes = 3;", "score": 0.8826094269752502 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " },\n ],\n });\n });\n });\n describe('.insertText()', () => {\n const newText = 'new text';\n const index = 3;\n it('should lead calling insertText() of the child with the passed index', () => {\n node.insertText(newText, index);", "score": 0.8822572827339172 }, { "filename": "src/entities/TextNode/TextNode.spec.ts", "retrieved_chunk": " it('should throw an error if end is invalid index', () => {\n expect(() => node.getText(0, initialText.length + 1)).toThrowError();\n });\n it('should throw an error if end index is greater than start index', () => {\n const start = 5;\n const end = 3;\n expect(() => node.getText(start, end)).toThrowError();\n });\n it('should not throw an error if end index is equal to start index', () => {\n const start = 5;", "score": 0.8755450248718262 }, { "filename": "src/entities/TextNode/TextNode.spec.ts", "retrieved_chunk": " node.split(index);\n expect(node.getText()).toEqual(initialText.slice(0, index));\n });\n it('should insert new node to the parent', () => {\n const newNode = node.split(index);\n expect(parentMock.insertAfter).toBeCalledWith(node, newNode);\n });\n });\n describe('.serialized', () => {\n it('should return text value and empty array of fragments', () => {", "score": 0.8705147504806519 } ]
typescript
const blocks: BlockNode[] = [];
import { BlockNode } from './index'; import { createBlockNodeName, createDataKey } from './types'; import { BlockTune, createBlockTuneName } from '../BlockTune'; import { TextNode } from '../TextNode'; import { ValueNode } from '../ValueNode'; import type { EditorDocument } from '../EditorDocument'; import type { BlockTuneConstructorParameters } from '../BlockTune/types'; import type { TextNodeConstructorParameters } from '../TextNode'; import type { ValueNodeConstructorParameters } from '../ValueNode'; describe('BlockNode', () => { describe('.serialized', () => { beforeEach(() => { jest.mock('../BlockTune', () => ({ BlockTune: jest.fn().mockImplementation(() => ({}) as BlockTune), serialized: jest.fn(), })); jest.mock('../TextNode', () => ({ TextNode: jest.fn().mockImplementation(() => ({}) as TextNode), serialized: jest.fn(), })); jest.mock('../ValueNode', () => ({ ValueNode: jest.fn().mockImplementation(() => ({}) as ValueNode), serialized: jest.fn(), })); }); afterEach(() => { jest.clearAllMocks(); }); it('should return a name of a tool that created a BlockNode', () => { const blockNodeName = createBlockNodeName('paragraph'); const blockNode = new BlockNode({ name: blockNodeName, data: {}, parent: {} as EditorDocument, }); const serialized = blockNode.serialized; expect(serialized.name).toEqual(blockNodeName); }); it('should call .serialized getter of all tunes associated with the BlockNode', () => { const blockTunesNames = [ createBlockTuneName('align'), createBlockTuneName('font-size'), createBlockTuneName('font-weight'), ]; const blockTunes = blockTunesNames.reduce((acc, name) => ({ ...acc, [name]: new BlockTune({} as BlockTuneConstructorParameters), }), {}); const spyArray = Object .values(blockTunes) .map((blockTune) => { return jest.spyOn(blockTune as BlockTune, 'serialized', 'get'); }); const blockNode = new BlockNode({ name: createBlockNodeName('paragraph'), data: {}, parent: {} as EditorDocument, tunes: blockTunes, }); blockNode.serialized; spyArray.forEach((spy) => { expect(spy).toHaveBeenCalled(); }); }); it('should call .serialized getter of all child ValueNodes associated with the BlockNode', () => { const countOfValueNodes = 2; const valueNodes = [ ...Array(countOfValueNodes).keys() ] .reduce((acc, index) => ({ ...acc,
[createDataKey(`data-key-${index}c${index}d`)]: new ValueNode({} as ValueNodeConstructorParameters), }), {});
const spyArray = Object .values(valueNodes) .map((valueNode) => { return jest.spyOn(valueNode as ValueNode, 'serialized', 'get'); }); const blockNode = new BlockNode({ name: createBlockNodeName('paragraph'), data: { ...valueNodes, }, parent: {} as EditorDocument, }); blockNode.serialized; spyArray.forEach((spy) => { expect(spy).toHaveBeenCalled(); }); }); it('should call .serialized getter of all child TextNodes associated with the BlockNode', () => { const countOfTextNodes = 3; const textNodes = [ ...Array(countOfTextNodes).keys() ] .map(() => new TextNode({} as TextNodeConstructorParameters)); const spyArray = textNodes .map((textNode) => { return jest.spyOn(textNode, 'serialized', 'get'); }); const blockNode = new BlockNode({ name: createBlockNodeName('paragraph'), data: { [createDataKey('data-key-1a2b')]: textNodes, }, parent: {} as EditorDocument, }); blockNode.serialized; spyArray.forEach((spy) => { expect(spy).toHaveBeenCalled(); }); }); }); });
src/entities/BlockNode/BlockNode.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/ValueNode/ValueNode.spec.ts", "retrieved_chunk": "import { createValueNodeMock } from '../../utils/mocks/createValueNodeMock';\ndescribe('ValueNode', () => {\n describe('.update()', () => {\n it('should update existing data associated with this value node', () => {\n // Arrange\n const longitudeValueNode = createValueNodeMock({\n value: 23.123,\n });\n const updatedLongitude = 23.456;\n // Act", "score": 0.8065922260284424 }, { "filename": "src/entities/TextNode/TextNode.spec.ts", "retrieved_chunk": " node.split(index);\n expect(node.getText()).toEqual(initialText.slice(0, index));\n });\n it('should insert new node to the parent', () => {\n const newNode = node.split(index);\n expect(parentMock.insertAfter).toBeCalledWith(node, newNode);\n });\n });\n describe('.serialized', () => {\n it('should return text value and empty array of fragments', () => {", "score": 0.8042020201683044 }, { "filename": "src/entities/interfaces/ParentNode.spec.ts", "retrieved_chunk": " expect(dummy.children).toEqual([]);\n });\n it('should return children passed via constructor', () => {\n const childMock = createChildMock();\n dummy = new Dummy({\n children: [ childMock ],\n });\n expect(dummy.children).toEqual([ childMock ]);\n });\n });", "score": 0.8007609844207764 }, { "filename": "src/entities/ValueNode/ValueNode.spec.ts", "retrieved_chunk": " longitudeValueNode.update(updatedLongitude);\n // Assert\n expect(longitudeValueNode.serialized).toBe(updatedLongitude);\n });\n });\n describe('.serialized', () => {\n it('should return the serialized data associated with this value node', () => {\n // Arrange\n const longitude = 23.123;\n const longitudeValueNode = createValueNodeMock({", "score": 0.8003926873207092 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " },\n ],\n });\n });\n });\n describe('.insertText()', () => {\n const newText = 'new text';\n const index = 3;\n it('should lead calling insertText() of the child with the passed index', () => {\n node.insertText(newText, index);", "score": 0.7954666018486023 } ]
typescript
[createDataKey(`data-key-${index}c${index}d`)]: new ValueNode({} as ValueNodeConstructorParameters), }), {});
import { FormattingNodeConstructorParameters, InlineToolName, InlineToolData } from './types'; import { ChildNode, InlineFragment, InlineNode, InlineNodeSerialized, ParentNode } from '../interfaces'; export * from './types'; /** * We need to extend FormattingNode interface with ChildNode and ParentNode ones to use the methods from mixins */ export interface FormattingNode extends ChildNode, ParentNode {} /** * FormattingNode class represents a node in a tree-like structure, used to store and manipulate formatted text content */ @ParentNode @ChildNode export class FormattingNode implements InlineNode { /** * Private field representing the name of the formatting tool applied to the content */ #tool: InlineToolName; /** * Any additional data associated with the formatting tool */ #data?: InlineToolData; /** * Constructor for FormattingNode class. * * @param args - FormattingNode constructor arguments. * @param args.tool - The name of the formatting tool applied to the content. * @param args.data - Any additional data associated with the formatting. */ constructor({ tool, data }: FormattingNodeConstructorParameters) { this.#tool = tool; this.#data = data; } /** * Returns text value length of current node (including subtree) */ public get length(): number { return this
.children.reduce((sum, child) => sum + child.length, 0);
} /** * Returns serialized value of the node: text and formatting fragments */ public get serialized(): InlineNodeSerialized { return { text: this.getText(), fragments: this.getFragments(), }; } /** * Inserts text to the specified index, by default appends text to the end of the current value * * @param text - text to insert * @param [index] - char index where to insert text */ public insertText(text: string, index = this.length): void { const [child, offset] = this.#findChildByIndex(index); child?.insertText(text, index - offset); } /** * Removes text from the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value * @returns {string} removed text */ public removeText(start = 0, end = this.length): string { const result = this.#reduceChildrenInRange( start, end, (acc, child, childStart, childEnd) => { return acc + child.removeText(childStart, childEnd); }, '' ); if (this.length === 0) { this.remove(); } return result; } /** * Returns text from the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value */ public getText(start = 0, end = this.length): string { return this.#reduceChildrenInRange( start, end, (acc, child, childStart, childEnd) => { return acc + child.getText(childStart, childEnd); }, '' ); } /** * Returns inline fragments for subtree including current node from the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value */ public getFragments(start = 0, end = this.length): InlineFragment[] { return this.#reduceChildrenInRange<InlineFragment[]>( start, end, (acc, child, childStart, childEnd) => { /** * If child is not a FormattingNode, it doesn't include any fragments. So we skip it.
*/ if (!(child instanceof FormattingNode)) { return acc; } acc.push(...child.getFragments(childStart, childEnd)); return acc; }, [ { tool: this.#tool, data: this.#data, range: [start, end], } ] ); } /** * Splits current node by the specified index * * @param index - char index where to split the node * @returns {FormattingNode | null} new node */ public split(index: number): FormattingNode | null { if (index === 0 || index === this.length) { return null; } const newNode = new FormattingNode({ tool: this.#tool, data: this.#data, }); const [child, offset] = this.#findChildByIndex(index); if (!child) { return null; } // Have to save length as it is changed after split const childLength = child.length; const splitNode = child.split(index - offset); let midNodeIndex = this.children.indexOf(child); /** * If node is split or if node is not split but index equals to child length, we should split children from the next node */ if (splitNode || (index - offset === childLength)) { midNodeIndex += 1; } newNode.append(...this.children.slice(midNodeIndex)); this.parent?.insertAfter(this, newNode); return newNode; } /** * Applies formatting to the text with specified inline tool in the specified range * * @param tool - name of inline tool to apply * @param start - char start index of the range * @param end - char end index of the range * @param [data] - inline tool data if applicable */ public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] { /** * In case current tool is the same as new one, do nothing */ if (tool === this.#tool) { return []; } return this.#reduceChildrenInRange<InlineNode[]>( start, end, (acc, child, childStart, childEnd) => { acc.push(...child.format(tool, childStart, childEnd, data)); return acc; }, [] ); } /** * Iterates through children in range and calls callback for each * * @param start - range start char index * @param end - range end char index * @param callback - callback to apply on children * @param initialValue - initial accumulator value * @private */ #reduceChildrenInRange<Acc>( start: number, end: number, callback: (acc: Acc, child: InlineNode, start: number, end: number) => Acc, initialValue: Acc ): Acc { let result = initialValue; for (const child of this.children) { if (start < child.length && end > 0 && start < end) { result = callback(result, child, Math.max(start, 0), Math.min(child.length, end)); } start -= child.length; end -= child.length; } return result; } /** * Returns child by passed text index * * @param index - char index * @private */ #findChildByIndex(index: number): [child: InlineNode & ChildNode | null, offset: number] { let totalLength = 0; for (const child of this.children) { if (index <= child.length + totalLength) { return [child, totalLength]; } totalLength += child.length; } return [null, totalLength]; } }
src/entities/FormattingNode/index.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/TextNode/index.ts", "retrieved_chunk": " constructor({ value = '' }: TextNodeConstructorParameters = {}) {\n this.#value = value;\n }\n /**\n * Returns length of the text\n */\n public get length(): number {\n return this.#value.length;\n }\n /**", "score": 0.916405439376831 }, { "filename": "src/entities/EditorDocument/index.ts", "retrieved_chunk": " * @param args.properties - The properties of the document.\n */\n constructor({ children, properties }: EditorDocumentConstructorParameters) {\n this.#children = children;\n this.#properties = properties;\n }\n /**\n * Returns count of child BlockNodes of the EditorDocument.\n */\n public get length(): number {", "score": 0.8712203502655029 }, { "filename": "src/entities/interfaces/InlineNode.ts", "retrieved_chunk": "import { InlineToolData, InlineToolName } from '../FormattingNode';\n/**\n * Interface describing abstract InlineNode\n */\nexport interface InlineNode {\n /**\n * Text length of node and it's subtree\n */\n length: number;\n /**", "score": 0.8707658052444458 }, { "filename": "src/entities/interfaces/ParentNode.ts", "retrieved_chunk": " this.#children = children;\n this.children.forEach(child => child.appendTo(this));\n }\n /**\n * Returns node's children\n */\n public get children(): ChildNode[] {\n return this.#children;\n }\n /**", "score": 0.8569397926330566 }, { "filename": "src/entities/TextNode/index.ts", "retrieved_chunk": " public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {\n this.#validateIndex(start);\n this.#validateIndex(end);\n const formattingNode = new FormattingNode({\n tool,\n data,\n });\n const fragments: ChildNode[] = [];\n /**\n * If start index is greater than 0, we need extract part of the text before the start index", "score": 0.8514801263809204 } ]
typescript
.children.reduce((sum, child) => sum + child.length, 0);
import { EditorDocument } from '../EditorDocument'; import { BlockTune, BlockTuneName, BlockTuneSerialized } from '../BlockTune'; import { BlockNodeConstructorParameters, BlockNodeName, createBlockNodeName, DataKey, createDataKey, BlockNodeData, BlockNodeSerialized } from './types'; /** * BlockNode class represents a node in a tree-like structure used to store and manipulate Blocks in an editor document. * A BlockNode can contain one or more child nodes of type TextNode, ValueNode or FormattingNode. * It can also be associated with one or more BlockTunes, which can modify the behavior of the BlockNode. */ export class BlockNode { /** * Field representing the name of the Tool that created this Block */ #name: BlockNodeName; /** * Field representing the content of the Block */ #data: BlockNodeData; /** * Field representing the parent EditorDocument of the BlockNode */ #parent: EditorDocument; /** * Private field representing the BlockTunes associated with the BlockNode */ #tunes: Record<BlockTuneName, BlockTune>; /** * Constructor for BlockNode class. * * @param args - BlockNode constructor arguments. * @param args.name - The name of the BlockNode. * @param args.data - The content of the BlockNode. * @param args.parent - The parent EditorDocument of the BlockNode. * @param args.tunes - The BlockTunes associated with the BlockNode. */ constructor({ name, data, parent, tunes = {} }: BlockNodeConstructorParameters) { this.#name = name; this.#data = data; this.#parent = parent; this.#tunes = tunes; } /** * Returns serialized object representing the BlockNode */ public get serialized(): BlockNodeSerialized { const serializedData = Object .entries(this.#data) .reduce( (acc, [dataKey, value]) => { /** * If the value is an array, we need to serialize each node in the array * Value is an array if the BlockNode contains TextNodes and FormattingNodes * After serializing there will be InlineNodeSerialized object */ if (value instanceof Array) { acc[dataKey] = value.map((node) => node.serialized); return acc; } acc[dataKey] = value.serialized; return acc; }, {} as Record<string, unknown> ); const serializedTunes = Object .entries(this.#tunes) .reduce( (acc, [name, tune]) => { acc[name] = tune.serialized; return acc; }, {} as Record
<string, BlockTuneSerialized> );
return { name: this.#name, data: serializedData, tunes: serializedTunes, }; } } export { BlockNodeName, createBlockNodeName, DataKey, createDataKey };
src/entities/BlockNode/index.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/BlockTune/index.ts", "retrieved_chunk": " */\n public update(key: string, value: unknown): void {\n this.#data[key] = value;\n }\n /**\n * Returns serialized version of the BlockTune.\n */\n public get serialized(): BlockTuneSerialized {\n return {\n name: this.#name,", "score": 0.7905585169792175 }, { "filename": "src/entities/BlockNode/BlockNode.spec.ts", "retrieved_chunk": " createBlockTuneName('align'),\n createBlockTuneName('font-size'),\n createBlockTuneName('font-weight'),\n ];\n const blockTunes = blockTunesNames.reduce((acc, name) => ({\n ...acc,\n [name]: new BlockTune({} as BlockTuneConstructorParameters),\n }), {});\n const spyArray = Object\n .values(blockTunes)", "score": 0.7902390956878662 }, { "filename": "src/entities/BlockTune/BlockTune.spec.ts", "retrieved_chunk": " // Act\n const tuneSerialized = tune.serialized;\n // Assert\n expect(tuneSerialized).toEqual(\n {\n name: 'styling',\n data: {\n background: 'transparent',\n },\n }", "score": 0.7659074664115906 }, { "filename": "src/utils/mocks/createBlockTuneMock.ts", "retrieved_chunk": " data?: Record<string, unknown>,\n}): BlockTune {\n return new BlockTune(\n {\n name: name || createBlockTuneName('aligning'),\n data: data || {},\n }\n );\n}", "score": 0.7624219655990601 }, { "filename": "src/entities/BlockNode/types/BlockNodeSerialized.ts", "retrieved_chunk": " * The content of the Block\n */\n data: Record<string, unknown>; // @todo replace unknown type with serialized root node and value node\n /**\n * Serialized BlockTunes associated with the BlockNode\n */\n tunes: Record<string, BlockTuneSerialized>;\n}", "score": 0.7467033863067627 } ]
typescript
<string, BlockTuneSerialized> );
import { BlockNode } from './index'; import { createBlockNodeName, createDataKey } from './types'; import { BlockTune, createBlockTuneName } from '../BlockTune'; import { TextNode } from '../TextNode'; import { ValueNode } from '../ValueNode'; import type { EditorDocument } from '../EditorDocument'; import type { BlockTuneConstructorParameters } from '../BlockTune/types'; import type { TextNodeConstructorParameters } from '../TextNode'; import type { ValueNodeConstructorParameters } from '../ValueNode'; describe('BlockNode', () => { describe('.serialized', () => { beforeEach(() => { jest.mock('../BlockTune', () => ({ BlockTune: jest.fn().mockImplementation(() => ({}) as BlockTune), serialized: jest.fn(), })); jest.mock('../TextNode', () => ({ TextNode: jest.fn().mockImplementation(() => ({}) as TextNode), serialized: jest.fn(), })); jest.mock('../ValueNode', () => ({ ValueNode: jest.fn().mockImplementation(() => ({}) as ValueNode), serialized: jest.fn(), })); }); afterEach(() => { jest.clearAllMocks(); }); it('should return a name of a tool that created a BlockNode', () => { const blockNodeName = createBlockNodeName('paragraph'); const blockNode = new BlockNode({ name: blockNodeName, data: {}, parent: {} as EditorDocument, }); const serialized = blockNode.serialized; expect(serialized.name).toEqual(blockNodeName); }); it('should call .serialized getter of all tunes associated with the BlockNode', () => { const blockTunesNames = [ createBlockTuneName('align'), createBlockTuneName('font-size'), createBlockTuneName('font-weight'), ]; const blockTunes = blockTunesNames.reduce((acc, name) => ({ ...acc, [name]: new BlockTune({} as BlockTuneConstructorParameters), }), {}); const spyArray = Object .values(blockTunes) .map((blockTune) => { return jest.spyOn(blockTune as BlockTune, 'serialized', 'get'); }); const blockNode = new BlockNode({ name: createBlockNodeName('paragraph'), data: {}, parent: {} as EditorDocument, tunes: blockTunes, }); blockNode.serialized; spyArray.forEach((spy) => { expect(spy).toHaveBeenCalled(); }); }); it('should call .serialized getter of all child ValueNodes associated with the BlockNode', () => { const countOfValueNodes = 2; const valueNodes = [ ...Array(countOfValueNodes).keys() ] .reduce((acc, index) => ({ ...acc, [createDataKey(`data-key-${index}c${index}d`)]: new ValueNode({} as ValueNodeConstructorParameters), }), {}); const spyArray = Object .values(valueNodes) .map((valueNode) => { return jest.spyOn(valueNode as ValueNode, 'serialized', 'get'); }); const blockNode = new BlockNode({ name: createBlockNodeName('paragraph'), data: { ...valueNodes, }, parent: {} as EditorDocument, }); blockNode.serialized; spyArray.forEach((spy) => { expect(spy).toHaveBeenCalled(); }); }); it('should call .serialized getter of all child TextNodes associated with the BlockNode', () => { const countOfTextNodes = 3; const textNodes = [ ...Array(countOfTextNodes).keys() ] .map(() => new TextNode(
{} as TextNodeConstructorParameters));
const spyArray = textNodes .map((textNode) => { return jest.spyOn(textNode, 'serialized', 'get'); }); const blockNode = new BlockNode({ name: createBlockNodeName('paragraph'), data: { [createDataKey('data-key-1a2b')]: textNodes, }, parent: {} as EditorDocument, }); blockNode.serialized; spyArray.forEach((spy) => { expect(spy).toHaveBeenCalled(); }); }); }); });
src/entities/BlockNode/BlockNode.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " removeSpy.mockRestore();\n lengthSpy.mockRestore();\n });\n });\n describe('.getText()', () => {\n const start = 3;\n const end = 5;\n it('should call getText() for the relevant child', () => {\n node.getText(start, end);\n expect(childMock.getText).toBeCalledWith(start, end);", "score": 0.8691521883010864 }, { "filename": "src/entities/TextNode/TextNode.spec.ts", "retrieved_chunk": " node.split(index);\n expect(node.getText()).toEqual(initialText.slice(0, index));\n });\n it('should insert new node to the parent', () => {\n const newNode = node.split(index);\n expect(parentMock.insertAfter).toBeCalledWith(node, newNode);\n });\n });\n describe('.serialized', () => {\n it('should return text value and empty array of fragments', () => {", "score": 0.8681150674819946 }, { "filename": "src/entities/TextNode/TextNode.spec.ts", "retrieved_chunk": " const formattingNode = fragments[1] as FormattingNode;\n expect(formattingNode.children[0].getText()).toEqual(initialText.slice(start, end));\n });\n it('should call parent\\'s insertAfter with new nodes', () => {\n const name = createInlineToolName('bold');\n const start = 5;\n const end = 8;\n const fragments = node.format(name, start, end);\n expect(parentMock.insertAfter).toBeCalledWith(node, ...fragments);\n });", "score": 0.8503662943840027 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " */\n it('should create new FormattingNode with children split from the original one', () => {\n const newNode = node.split(childMock.length);\n expect(newNode?.children).toEqual([ anotherChildMock ]);\n });\n it('should call split method of child containing the specified index', () => {\n node.split(index);\n expect(childMock.split).toBeCalledWith(index);\n });\n it('should insert new node to the parent', () => {", "score": 0.8495043516159058 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " });\n });\n describe('.removeText()', () => {\n const start = 3;\n const end = 5;\n it('should remove text from appropriate child', () => {\n node.removeText(start, end);\n expect(childMock.removeText).toBeCalledWith(start, end);\n });\n it('should adjust indices by child offset', () => {", "score": 0.8471959829330444 } ]
typescript
{} as TextNodeConstructorParameters));
import { BlockTuneConstructorParameters, BlockTuneName, BlockTuneSerialized, createBlockTuneName } from './types'; /** * BlockTune class represents a set of additional information associated with a BlockNode. * This information can be used by a BlockTool to modify the behavior of the BlockNode. */ export class BlockTune { /** * Private field representing the name of the tune */ #name: BlockTuneName; /** * Private field representing any additional data associated with the tune */ #data: Record<string, unknown>; /** * Constructor for BlockTune class. * * @param args - BlockTune constructor arguments. * @param args.name - The name of the tune. * @param args.data - Any additional data associated with the tune. */ constructor({ name, data }: BlockTuneConstructorParameters) { this.#name = name; this.#data = data; } /** * Updates data associated with the tune. * * @param key - The key of the data to update * @param value - The value to update the data with */ public update(key: string, value: unknown): void { this.#data[key] = value; } /** * Returns serialized version of the BlockTune. */ public get serialized()
: BlockTuneSerialized {
return { name: this.#name, data: this.#data, }; } } export { BlockTuneName, createBlockTuneName }; export type { BlockTuneSerialized };
src/entities/BlockTune/index.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/ValueNode/index.ts", "retrieved_chunk": " /**\n * Updates the data associated with this value node.\n *\n * @param value - The new value of this value node.\n */\n public update(value: ValueType): void {\n this.#value = value;\n }\n /**\n * Returns serialized data associated with this value node.", "score": 0.903389573097229 }, { "filename": "src/entities/BlockTune/types/BlockTuneSerialized.ts", "retrieved_chunk": "/**\n * BlockTuneSerialized represents a serialized version of a BlockTune.\n */\nexport interface BlockTuneSerialized {\n /**\n * The name of the tune.\n * Serialized as a string.\n */\n name: string;\n /**", "score": 0.8927831053733826 }, { "filename": "src/entities/BlockTune/types/BlockTuneConstructorParameters.ts", "retrieved_chunk": "import { BlockTuneName } from './BlockTuneName';\nexport interface BlockTuneConstructorParameters {\n /**\n * The name of the tune\n */\n name: BlockTuneName;\n /**\n * Any additional data associated with the tune\n */\n data: Record<string, unknown>;", "score": 0.8862134218215942 }, { "filename": "src/entities/BlockNode/types/BlockNodeSerialized.ts", "retrieved_chunk": " * The content of the Block\n */\n data: Record<string, unknown>; // @todo replace unknown type with serialized root node and value node\n /**\n * Serialized BlockTunes associated with the BlockNode\n */\n tunes: Record<string, BlockTuneSerialized>;\n}", "score": 0.8856236338615417 }, { "filename": "src/entities/BlockNode/types/BlockNodeSerialized.ts", "retrieved_chunk": "import { BlockTuneSerialized } from '../../BlockTune';\n/**\n * Serialized version of the BlockNode\n */\nexport interface BlockNodeSerialized {\n /**\n * The name of the tool created a Block\n */\n name: string;\n /**", "score": 0.875124454498291 } ]
typescript
: BlockTuneSerialized {
import { BlockNode } from './index'; import { createBlockNodeName, createDataKey } from './types'; import { BlockTune, createBlockTuneName } from '../BlockTune'; import { TextNode } from '../TextNode'; import { ValueNode } from '../ValueNode'; import type { EditorDocument } from '../EditorDocument'; import type { BlockTuneConstructorParameters } from '../BlockTune/types'; import type { TextNodeConstructorParameters } from '../TextNode'; import type { ValueNodeConstructorParameters } from '../ValueNode'; describe('BlockNode', () => { describe('.serialized', () => { beforeEach(() => { jest.mock('../BlockTune', () => ({ BlockTune: jest.fn().mockImplementation(() => ({}) as BlockTune), serialized: jest.fn(), })); jest.mock('../TextNode', () => ({ TextNode: jest.fn().mockImplementation(() => ({}) as TextNode), serialized: jest.fn(), })); jest.mock('../ValueNode', () => ({ ValueNode: jest.fn().mockImplementation(() => ({}) as ValueNode), serialized: jest.fn(), })); }); afterEach(() => { jest.clearAllMocks(); }); it('should return a name of a tool that created a BlockNode', () => { const blockNodeName = createBlockNodeName('paragraph'); const blockNode = new BlockNode({ name: blockNodeName, data: {}, parent: {} as EditorDocument, }); const serialized = blockNode.serialized; expect(serialized.name).toEqual(blockNodeName); }); it('should call .serialized getter of all tunes associated with the BlockNode', () => { const blockTunesNames = [ createBlockTuneName('align'), createBlockTuneName('font-size'), createBlockTuneName('font-weight'), ]; const blockTunes = blockTunesNames.reduce((acc, name) => ({ ...acc, [name]: new BlockTune({} as BlockTuneConstructorParameters), }), {}); const spyArray = Object .values(blockTunes) .map((blockTune) => { return jest.spyOn(blockTune as BlockTune, 'serialized', 'get'); }); const blockNode = new BlockNode({
name: createBlockNodeName('paragraph'), data: {}, parent: {} as EditorDocument, tunes: blockTunes, });
blockNode.serialized; spyArray.forEach((spy) => { expect(spy).toHaveBeenCalled(); }); }); it('should call .serialized getter of all child ValueNodes associated with the BlockNode', () => { const countOfValueNodes = 2; const valueNodes = [ ...Array(countOfValueNodes).keys() ] .reduce((acc, index) => ({ ...acc, [createDataKey(`data-key-${index}c${index}d`)]: new ValueNode({} as ValueNodeConstructorParameters), }), {}); const spyArray = Object .values(valueNodes) .map((valueNode) => { return jest.spyOn(valueNode as ValueNode, 'serialized', 'get'); }); const blockNode = new BlockNode({ name: createBlockNodeName('paragraph'), data: { ...valueNodes, }, parent: {} as EditorDocument, }); blockNode.serialized; spyArray.forEach((spy) => { expect(spy).toHaveBeenCalled(); }); }); it('should call .serialized getter of all child TextNodes associated with the BlockNode', () => { const countOfTextNodes = 3; const textNodes = [ ...Array(countOfTextNodes).keys() ] .map(() => new TextNode({} as TextNodeConstructorParameters)); const spyArray = textNodes .map((textNode) => { return jest.spyOn(textNode, 'serialized', 'get'); }); const blockNode = new BlockNode({ name: createBlockNodeName('paragraph'), data: { [createDataKey('data-key-1a2b')]: textNodes, }, parent: {} as EditorDocument, }); blockNode.serialized; spyArray.forEach((spy) => { expect(spy).toHaveBeenCalled(); }); }); }); });
src/entities/BlockNode/BlockNode.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/BlockTune/BlockTune.spec.ts", "retrieved_chunk": " });\n describe('.serialized', () => {\n it('should return serialized version of the BlockTune', () => {\n // Arrange\n const tune = createBlockTuneMock({\n name: createBlockTuneName('styling'),\n data: {\n background: 'transparent',\n },\n });", "score": 0.8523811101913452 }, { "filename": "src/utils/mocks/createBlockNodeMock.ts", "retrieved_chunk": " */\nexport function createBlockNodeMock({ name, parent, tunes, data }: { name?: BlockNodeName, parent: EditorDocument, data?: BlockNodeData, tunes?: Record<BlockTuneName, BlockTune> }): BlockNode {\n return new BlockNode({\n name: name || createBlockNodeName('header'),\n parent,\n data: data || {},\n tunes: tunes || {},\n });\n}", "score": 0.8521479368209839 }, { "filename": "src/utils/mocks/createBlockTuneMock.ts", "retrieved_chunk": " data?: Record<string, unknown>,\n}): BlockTune {\n return new BlockTune(\n {\n name: name || createBlockTuneName('aligning'),\n data: data || {},\n }\n );\n}", "score": 0.8252933621406555 }, { "filename": "src/entities/TextNode/TextNode.spec.ts", "retrieved_chunk": " append: jest.fn(),\n children: [],\n } as unknown as ParentNode;\n let node: TextNode;\n beforeEach(() => {\n node = new TextNode({\n value: initialText,\n parent: parentMock as FormattingNode,\n });\n });", "score": 0.8142907619476318 }, { "filename": "src/entities/EditorDocument/EditorDocument.spec.ts", "retrieved_chunk": " for (let i = 0; i < countOfBlocks; i++) {\n const block = createBlockNodeMock({\n parent: document,\n });\n document.addBlock(block);\n }\n return document;\n}\ndescribe('EditorDocument', () => {\n describe('.length', () => {", "score": 0.8079899549484253 } ]
typescript
name: createBlockNodeName('paragraph'), data: {}, parent: {} as EditorDocument, tunes: blockTunes, });
import { FormattingNodeConstructorParameters, InlineToolName, InlineToolData } from './types'; import { ChildNode, InlineFragment, InlineNode, InlineNodeSerialized, ParentNode } from '../interfaces'; export * from './types'; /** * We need to extend FormattingNode interface with ChildNode and ParentNode ones to use the methods from mixins */ export interface FormattingNode extends ChildNode, ParentNode {} /** * FormattingNode class represents a node in a tree-like structure, used to store and manipulate formatted text content */ @ParentNode @ChildNode export class FormattingNode implements InlineNode { /** * Private field representing the name of the formatting tool applied to the content */ #tool: InlineToolName; /** * Any additional data associated with the formatting tool */ #data?: InlineToolData; /** * Constructor for FormattingNode class. * * @param args - FormattingNode constructor arguments. * @param args.tool - The name of the formatting tool applied to the content. * @param args.data - Any additional data associated with the formatting. */ constructor({ tool, data }: FormattingNodeConstructorParameters) { this.#tool = tool; this.#data = data; } /** * Returns text value length of current node (including subtree) */ public get length(): number { return this.children.reduce((sum, child) => sum + child.length, 0); } /** * Returns serialized value of the node: text and formatting fragments */ public get serialized(): InlineNodeSerialized { return { text: this.getText(), fragments: this.getFragments(), }; } /** * Inserts text to the specified index, by default appends text to the end of the current value * * @param text - text to insert * @param [index] - char index where to insert text */ public insertText(text: string, index = this.length): void { const [child, offset] = this.#findChildByIndex(index); child?.insertText(text, index - offset); } /** * Removes text from the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value * @returns {string} removed text */ public removeText(start = 0, end = this.length): string { const result = this.#reduceChildrenInRange( start, end, (acc, child, childStart, childEnd) => { return acc + child.removeText(childStart, childEnd); }, '' ); if (this.length === 0) { this.remove(); } return result; } /** * Returns text from the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value */ public getText(start = 0, end = this.length): string { return this.#reduceChildrenInRange( start, end, (acc, child, childStart, childEnd) => { return acc + child.getText(childStart, childEnd); }, '' ); } /** * Returns inline fragments for subtree including current node from the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value */
public getFragments(start = 0, end = this.length): InlineFragment[] {
return this.#reduceChildrenInRange<InlineFragment[]>( start, end, (acc, child, childStart, childEnd) => { /** * If child is not a FormattingNode, it doesn't include any fragments. So we skip it. */ if (!(child instanceof FormattingNode)) { return acc; } acc.push(...child.getFragments(childStart, childEnd)); return acc; }, [ { tool: this.#tool, data: this.#data, range: [start, end], } ] ); } /** * Splits current node by the specified index * * @param index - char index where to split the node * @returns {FormattingNode | null} new node */ public split(index: number): FormattingNode | null { if (index === 0 || index === this.length) { return null; } const newNode = new FormattingNode({ tool: this.#tool, data: this.#data, }); const [child, offset] = this.#findChildByIndex(index); if (!child) { return null; } // Have to save length as it is changed after split const childLength = child.length; const splitNode = child.split(index - offset); let midNodeIndex = this.children.indexOf(child); /** * If node is split or if node is not split but index equals to child length, we should split children from the next node */ if (splitNode || (index - offset === childLength)) { midNodeIndex += 1; } newNode.append(...this.children.slice(midNodeIndex)); this.parent?.insertAfter(this, newNode); return newNode; } /** * Applies formatting to the text with specified inline tool in the specified range * * @param tool - name of inline tool to apply * @param start - char start index of the range * @param end - char end index of the range * @param [data] - inline tool data if applicable */ public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] { /** * In case current tool is the same as new one, do nothing */ if (tool === this.#tool) { return []; } return this.#reduceChildrenInRange<InlineNode[]>( start, end, (acc, child, childStart, childEnd) => { acc.push(...child.format(tool, childStart, childEnd, data)); return acc; }, [] ); } /** * Iterates through children in range and calls callback for each * * @param start - range start char index * @param end - range end char index * @param callback - callback to apply on children * @param initialValue - initial accumulator value * @private */ #reduceChildrenInRange<Acc>( start: number, end: number, callback: (acc: Acc, child: InlineNode, start: number, end: number) => Acc, initialValue: Acc ): Acc { let result = initialValue; for (const child of this.children) { if (start < child.length && end > 0 && start < end) { result = callback(result, child, Math.max(start, 0), Math.min(child.length, end)); } start -= child.length; end -= child.length; } return result; } /** * Returns child by passed text index * * @param index - char index * @private */ #findChildByIndex(index: number): [child: InlineNode & ChildNode | null, offset: number] { let totalLength = 0; for (const child of this.children) { if (index <= child.length + totalLength) { return [child, totalLength]; } totalLength += child.length; } return [null, totalLength]; } }
src/entities/FormattingNode/index.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/interfaces/InlineNode.ts", "retrieved_chunk": " * Returns text value in passed range\n *\n * @param start - start char index of the range\n * @param end - end char index of the range\n */\n getText(start?: number, end?: number): string;\n /**\n * Applies inline formatting on the passed range\n *\n * @param name - name of Inline Tool to apply", "score": 0.9277199506759644 }, { "filename": "src/entities/TextNode/index.ts", "retrieved_chunk": " *\n * @param start - start char index of the range\n * @param end - end char index of the range\n */\n #cloneContents(start: number, end: number): TextNode {\n return new TextNode({\n value: this.getText(start, end),\n });\n }\n}", "score": 0.9189573526382446 }, { "filename": "src/entities/interfaces/InlineNode.ts", "retrieved_chunk": " * @param [index] - char index where to insert text\n */\n insertText(text: string, index?: number): void;\n /**\n * Removes text from the passed range\n *\n * @param [start] - start char index of the range\n * @param [end] - end char index of the range\n * @returns {string} removed text\n */", "score": 0.9105554223060608 }, { "filename": "src/entities/interfaces/InlineNode.ts", "retrieved_chunk": " * @param [start] - start char index of the range\n * @param [end] - end char index of the range\n * @param [data] - Inline Tool data if applicable\n * @returns {InlineNode[]} - array of nodes after applied formatting\n */\n format(name: InlineToolName, start?: number, end?: number, data?: InlineToolData): InlineNode[];\n /**\n * Inserts text at passed char index\n *\n * @param text - text to insert", "score": 0.9067690372467041 }, { "filename": "src/entities/TextNode/index.ts", "retrieved_chunk": " }\n /**\n * Applies inline tool for specified range\n *\n * @param tool - name of the tool to apply\n * @param start - start char index of the range\n * @param end - end char index of the range\n * @param [data] - inline tool data if applicable\n * @returns {InlineNode[]} - array of nodes after applied formatting\n */", "score": 0.9022549986839294 } ]
typescript
public getFragments(start = 0, end = this.length): InlineFragment[] {
import { describe, it, expect, beforeEach } from '@jest/globals'; import { TextNode } from './index'; import { createInlineToolName, FormattingNode } from '../FormattingNode'; import type { ParentNode } from '../interfaces'; describe('TextNode', () => { const initialText = 'initial text'; const text = 'some text'; const parentMock = { insertAfter: jest.fn(), removeChild: jest.fn(), append: jest.fn(), children: [], } as unknown as ParentNode; let node: TextNode; beforeEach(() => { node = new TextNode({ value: initialText, parent: parentMock as FormattingNode, }); }); it('should have empty value by default', () => { node = new TextNode(); expect(node.getText()).toEqual(''); }); describe('.insertText()', () => { it('should set text to value if node is empty', () => { node = new TextNode(); node.insertText(text); expect(node.getText()).toEqual(text); }); it('should append text if not empty', () => { node.insertText(text); expect(node.getText()).toEqual(initialText + text); }); it('should prepend text if index is 0 and node is not empty', () => { node.insertText(text, 0); expect(node.getText()).toEqual(text + initialText); }); it('should insert text at index if not empty', () => { const index = 5; node.insertText(text, index); expect(node.getText()).toEqual(initialText.slice(0, index) + text + initialText.slice(index)); }); it('should throw an error if index is less than 0', () => { const f = (): void => node.insertText(text, -1); expect(f).toThrowError(); }); it('should throw an error if index is greater than node length', () => { const f = (): void => node.insertText(text, initialText.length + 1); expect(f).toThrowError(); }); }); describe('.getText()', () => { it('should return sliced value if start provided', () => { const start = 5; expect(node.getText(start)).toEqual(initialText.slice(start)); }); it('should return sliced value if end provided', () => { const end = 6; expect(node.getText(0, end)).toEqual(initialText.slice(0, end)); }); it('should return sliced value if full range provided', () => { const start = 3; const end = 9; expect(node.getText(start, end)).toEqual(initialText.slice(start, end)); }); it('should throw an error if start is invalid index', () => { expect(() => node.getText(-1)).toThrowError(); expect(() => node.getText(initialText.length + 1)).toThrowError(); }); it('should throw an error if end is invalid index', () => { expect(() => node.getText(0, initialText.length + 1)).toThrowError(); }); it('should throw an error if end index is greater than start index', () => { const start = 5; const end = 3; expect(() => node.getText(start, end)).toThrowError(); }); it('should not throw an error if end index is equal to start index', () => { const start = 5; const end = 5; expect(() => node.getText(start, end)).not.toThrowError(); }); }); describe('.removeText()', () => { it('should remove all text by default', () => { node.removeText(); expect(node.getText()).toEqual(''); }); it('should remove text from specified index', () => { const start = 3; node.removeText(start); expect(node.getText()).toEqual(initialText.slice(0, start)); }); it('should remove text from 0 to specified end index', () => { const end = 8; node.removeText(0, end); expect(node.getText()).toEqual(initialText.slice(end)); }); it('should remove text from specified start and end indices', () => { const start = 3; const end = 8; node.removeText(start, end); expect(node.getText()).toEqual(initialText.slice(0, start) + initialText.slice(end)); }); it('should call remove() method if node is empty after
removeText() call', () => { jest.spyOn(node, 'remove'); node.removeText(); expect(node.remove).toBeCalled(); }); }); describe('.format()', () => { it('should return just one FormattingNode, if formatting full TextNode', () => { const name = createInlineToolName('bold'); const fragments = node.format(name, 0, initialText.length); expect(fragments).toHaveLength(1); expect(fragments[0]).toBeInstanceOf(FormattingNode); }); it('should return two fragments if formatting from the start, but not to the end', () => { const name = createInlineToolName('bold'); const end = 5; const fragments = node.format(name, 0, end); expect(fragments).toHaveLength(2); expect(fragments[0]).toBeInstanceOf(FormattingNode); expect(fragments[1]).toBeInstanceOf(TextNode); }); it('should return two fragments if formatting to the end, but not from the start', () => { const name = createInlineToolName('bold'); const start = 5; const fragments = node.format(name, start, initialText.length); expect(fragments).toHaveLength(2); expect(fragments[0]).toBeInstanceOf(TextNode); expect(fragments[1]).toBeInstanceOf(FormattingNode); }); it('should return three fragments if formatting in the middle', () => { const name = createInlineToolName('bold'); const start = 5; const end = 8; const fragments = node.format(name, start, end); // eslint-disable-next-line @typescript-eslint/no-magic-numbers expect(fragments).toHaveLength(3); expect(fragments[0]).toBeInstanceOf(TextNode); expect(fragments[1]).toBeInstanceOf(FormattingNode); expect(fragments[2]).toBeInstanceOf(TextNode); }); it('should return FormattingNode with a TextNode as a child with correct text value', () => { const name = createInlineToolName('bold'); const start = 5; const end = 8; const fragments = node.format(name, start, end); const formattingNode = fragments[1] as FormattingNode; expect(formattingNode.children[0].getText()).toEqual(initialText.slice(start, end)); }); it('should call parent\'s insertAfter with new nodes', () => { const name = createInlineToolName('bold'); const start = 5; const end = 8; const fragments = node.format(name, start, end);
expect(parentMock.insertAfter).toBeCalledWith(node, ...fragments);
}); }); describe('.split()', () => { const index = 5; it('should not split (return null) if index is 0', () => { const newNode = node.split(0); expect(newNode).toBeNull(); }); it('should not split (return null) if index equals text length', () => { const newNode = node.split(initialText.length); expect(newNode).toBeNull(); }); it('should create new TextNode on split', () => { const newNode = node.split(index); expect(newNode).toBeInstanceOf(TextNode); }); it('should create new TextNode with text value splitted from the original one', () => { const newNode = node.split(index); expect(newNode?.getText()).toEqual(initialText.slice(index)); }); it('should remove split text value from the original node', () => { node.split(index); expect(node.getText()).toEqual(initialText.slice(0, index)); }); it('should insert new node to the parent', () => { const newNode = node.split(index); expect(parentMock.insertAfter).toBeCalledWith(node, newNode); }); }); describe('.serialized', () => { it('should return text value and empty array of fragments', () => { const result = node.serialized; expect(result).toEqual({ text: initialText, fragments: [], }); }); }); });
src/entities/TextNode/TextNode.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " const newNode = node.split(index);\n expect(parentMock.insertAfter).toBeCalledWith(node, newNode);\n });\n });\n describe('.format()', () => {\n const start = 3;\n const end = 5;\n it('should apply formatting to the relevant child', () => {\n node.format(anotherTool, start, end);\n expect(childMock.format).toBeCalledWith(anotherTool, start, end, undefined);", "score": 0.9194252490997314 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " });\n it('should return array of new formatting nodes', () => {\n const result = node.format(anotherTool, start, end);\n expect(result).toEqual(childMock.format(anotherTool, start, end));\n });\n });\n});", "score": 0.8875060677528381 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " */\n it('should create new FormattingNode with children split from the original one', () => {\n const newNode = node.split(childMock.length);\n expect(newNode?.children).toEqual([ anotherChildMock ]);\n });\n it('should call split method of child containing the specified index', () => {\n node.split(index);\n expect(childMock.split).toBeCalledWith(index);\n });\n it('should insert new node to the parent', () => {", "score": 0.873264729976654 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " removeSpy.mockRestore();\n lengthSpy.mockRestore();\n });\n });\n describe('.getText()', () => {\n const start = 3;\n const end = 5;\n it('should call getText() for the relevant child', () => {\n node.getText(start, end);\n expect(childMock.getText).toBeCalledWith(start, end);", "score": 0.8723069429397583 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " });\n });\n describe('.removeText()', () => {\n const start = 3;\n const end = 5;\n it('should remove text from appropriate child', () => {\n node.removeText(start, end);\n expect(childMock.removeText).toBeCalledWith(start, end);\n });\n it('should adjust indices by child offset', () => {", "score": 0.8641027808189392 } ]
typescript
expect(parentMock.insertAfter).toBeCalledWith(node, ...fragments);
import { FormattingNodeConstructorParameters, InlineToolName, InlineToolData } from './types'; import { ChildNode, InlineFragment, InlineNode, InlineNodeSerialized, ParentNode } from '../interfaces'; export * from './types'; /** * We need to extend FormattingNode interface with ChildNode and ParentNode ones to use the methods from mixins */ export interface FormattingNode extends ChildNode, ParentNode {} /** * FormattingNode class represents a node in a tree-like structure, used to store and manipulate formatted text content */ @ParentNode @ChildNode export class FormattingNode implements InlineNode { /** * Private field representing the name of the formatting tool applied to the content */ #tool: InlineToolName; /** * Any additional data associated with the formatting tool */ #data?: InlineToolData; /** * Constructor for FormattingNode class. * * @param args - FormattingNode constructor arguments. * @param args.tool - The name of the formatting tool applied to the content. * @param args.data - Any additional data associated with the formatting. */ constructor({ tool, data }: FormattingNodeConstructorParameters) { this.#tool = tool; this.#data = data; } /** * Returns text value length of current node (including subtree) */ public get length(): number { return this.children.reduce((sum, child) => sum + child.length, 0); } /** * Returns serialized value of the node: text and formatting fragments */ public get serialized(): InlineNodeSerialized { return { text: this.getText(), fragments: this.getFragments(), }; } /** * Inserts text to the specified index, by default appends text to the end of the current value * * @param text - text to insert * @param [index] - char index where to insert text */ public insertText(text: string, index = this.length): void { const [child, offset] = this.#findChildByIndex(index); child?.insertText(text, index - offset); } /** * Removes text from the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value * @returns {string} removed text */ public removeText(start = 0, end = this.length): string { const result = this.#reduceChildrenInRange( start, end, (acc, child, childStart, childEnd) => { return acc + child.removeText(childStart, childEnd); }, '' ); if (this.length === 0) { this.remove(); } return result; } /** * Returns text from the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value */ public getText(start = 0, end = this.length): string { return this.#reduceChildrenInRange( start, end, (acc, child, childStart, childEnd) => { return acc + child.getText(childStart, childEnd); }, '' ); } /** * Returns inline fragments for subtree including current node from the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value */ public getFragments(start = 0, end = this.length): InlineFragment[] { return this.#reduceChildrenInRange<InlineFragment[]>( start, end, (acc, child, childStart, childEnd) => { /** * If child is not a FormattingNode, it doesn't include any fragments. So we skip it.
*/ if (!(child instanceof FormattingNode)) { return acc; } acc.push(...child.getFragments(childStart, childEnd)); return acc; }, [ { tool: this.#tool, data: this.#data, range: [start, end], } ] ); } /** * Splits current node by the specified index * * @param index - char index where to split the node * @returns {FormattingNode | null} new node */ public split(index: number): FormattingNode | null { if (index === 0 || index === this.length) { return null; } const newNode = new FormattingNode({ tool: this.#tool, data: this.#data, }); const [child, offset] = this.#findChildByIndex(index); if (!child) { return null; } // Have to save length as it is changed after split const childLength = child.length; const splitNode = child.split(index - offset);
let midNodeIndex = this.children.indexOf(child);
/** * If node is split or if node is not split but index equals to child length, we should split children from the next node */ if (splitNode || (index - offset === childLength)) { midNodeIndex += 1; } newNode.append(...this.children.slice(midNodeIndex)); this.parent?.insertAfter(this, newNode); return newNode; } /** * Applies formatting to the text with specified inline tool in the specified range * * @param tool - name of inline tool to apply * @param start - char start index of the range * @param end - char end index of the range * @param [data] - inline tool data if applicable */ public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] { /** * In case current tool is the same as new one, do nothing */ if (tool === this.#tool) { return []; } return this.#reduceChildrenInRange<InlineNode[]>( start, end, (acc, child, childStart, childEnd) => { acc.push(...child.format(tool, childStart, childEnd, data)); return acc; }, [] ); } /** * Iterates through children in range and calls callback for each * * @param start - range start char index * @param end - range end char index * @param callback - callback to apply on children * @param initialValue - initial accumulator value * @private */ #reduceChildrenInRange<Acc>( start: number, end: number, callback: (acc: Acc, child: InlineNode, start: number, end: number) => Acc, initialValue: Acc ): Acc { let result = initialValue; for (const child of this.children) { if (start < child.length && end > 0 && start < end) { result = callback(result, child, Math.max(start, 0), Math.min(child.length, end)); } start -= child.length; end -= child.length; } return result; } /** * Returns child by passed text index * * @param index - char index * @private */ #findChildByIndex(index: number): [child: InlineNode & ChildNode | null, offset: number] { let totalLength = 0; for (const child of this.children) { if (index <= child.length + totalLength) { return [child, totalLength]; } totalLength += child.length; } return [null, totalLength]; } }
src/entities/FormattingNode/index.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/interfaces/ParentNode.ts", "retrieved_chunk": " *\n * @param child - child to remove\n */\n public removeChild(child: ChildNode): void {\n const index = this.children.indexOf(child);\n this.children.splice(index, 1);\n child.remove();\n }\n /**\n * Inserts new children after specified target", "score": 0.8238852620124817 }, { "filename": "src/entities/TextNode/index.ts", "retrieved_chunk": " /**\n * Splits current node into two nodes by the specified index\n *\n * @param index - char index where to split\n * @returns {TextNode|null} - new node or null if split is not applicable\n */\n public split(index: number): TextNode | null {\n if (index === 0 || index === this.length) {\n return null;\n }", "score": 0.8116808533668518 }, { "filename": "src/entities/interfaces/ParentNode.ts", "retrieved_chunk": " * Appends passed children to this node\n *\n * @param children - array of children to append\n */\n public append(...children: ChildNode[]): void {\n /**\n * If node is already a child of current node, remove it to append at the end\n */\n children.forEach(child => {\n const index = this.children.indexOf(child);", "score": 0.8102066516876221 }, { "filename": "src/entities/interfaces/ParentNode.ts", "retrieved_chunk": " *\n * @param target - target after which to insert new children\n * @param children - children nodes to insert\n */\n public insertAfter(target: ChildNode, ...children: ChildNode[]): void {\n /**\n * If node is already a child of current node, remove it to append at the end\n */\n children.forEach(child => {\n const index = this.children.indexOf(child);", "score": 0.8087800741195679 }, { "filename": "src/entities/interfaces/ParentNode.ts", "retrieved_chunk": " if (index === -1) {\n return;\n }\n this.children.splice(index, 1);\n });\n this.children.push(...children);\n children.forEach(child => child.appendTo(this));\n }\n /**\n * Removes a child from the parent", "score": 0.8066998720169067 } ]
typescript
let midNodeIndex = this.children.indexOf(child);
import { FormattingNodeConstructorParameters, InlineToolName, InlineToolData } from './types'; import { ChildNode, InlineFragment, InlineNode, InlineNodeSerialized, ParentNode } from '../interfaces'; export * from './types'; /** * We need to extend FormattingNode interface with ChildNode and ParentNode ones to use the methods from mixins */ export interface FormattingNode extends ChildNode, ParentNode {} /** * FormattingNode class represents a node in a tree-like structure, used to store and manipulate formatted text content */ @ParentNode @ChildNode export class FormattingNode implements InlineNode { /** * Private field representing the name of the formatting tool applied to the content */ #tool: InlineToolName; /** * Any additional data associated with the formatting tool */ #data?: InlineToolData; /** * Constructor for FormattingNode class. * * @param args - FormattingNode constructor arguments. * @param args.tool - The name of the formatting tool applied to the content. * @param args.data - Any additional data associated with the formatting. */ constructor({ tool, data }: FormattingNodeConstructorParameters) { this.#tool = tool; this.#data = data; } /** * Returns text value length of current node (including subtree) */ public get length(): number { return this.children.reduce((sum, child) => sum + child.length, 0); } /** * Returns serialized value of the node: text and formatting fragments */ public get serialized(): InlineNodeSerialized { return { text: this.getText(), fragments: this.getFragments(), }; } /** * Inserts text to the specified index, by default appends text to the end of the current value * * @param text - text to insert * @param [index] - char index where to insert text */ public insertText(text: string, index = this.length): void { const [child, offset] = this.#findChildByIndex(index); child?.insertText(text, index - offset); } /** * Removes text from the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value * @returns {string} removed text */ public removeText(start = 0, end = this.length): string { const result = this.#reduceChildrenInRange( start, end, (acc, child, childStart, childEnd) => { return acc + child.removeText(childStart, childEnd); }, '' ); if (this.length === 0) { this.remove(); } return result; } /** * Returns text from the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value */ public getText(start = 0, end = this.length): string { return this.#reduceChildrenInRange( start, end, (acc, child, childStart, childEnd) => { return acc + child.getText(childStart, childEnd); }, '' ); } /** * Returns inline fragments for subtree including current node from the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value */ public getFragments(start = 0, end = this.length): InlineFragment[] { return this.#reduceChildrenInRange<InlineFragment[]>( start, end, (acc, child, childStart, childEnd) => { /** * If child is not a FormattingNode, it doesn't include any fragments. So we skip it.
*/ if (!(child instanceof FormattingNode)) { return acc; } acc.push(...child.getFragments(childStart, childEnd)); return acc; }, [ { tool: this.#tool, data: this.#data, range: [start, end], } ] ); } /** * Splits current node by the specified index * * @param index - char index where to split the node * @returns {FormattingNode | null} new node */ public split(index: number): FormattingNode | null { if (index === 0 || index === this.length) { return null; } const newNode = new FormattingNode({ tool: this.#tool, data: this.#data, }); const [child, offset] = this.#findChildByIndex(index); if (!child) { return null; } // Have to save length as it is changed after split const childLength = child.length; const splitNode = child.split(index - offset); let midNodeIndex = this.children.indexOf(child); /** * If node is split or if node is not split but index equals to child length, we should split children from the next node */ if (splitNode || (index - offset === childLength)) { midNodeIndex += 1; } newNode.append(
...this.children.slice(midNodeIndex));
this.parent?.insertAfter(this, newNode); return newNode; } /** * Applies formatting to the text with specified inline tool in the specified range * * @param tool - name of inline tool to apply * @param start - char start index of the range * @param end - char end index of the range * @param [data] - inline tool data if applicable */ public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] { /** * In case current tool is the same as new one, do nothing */ if (tool === this.#tool) { return []; } return this.#reduceChildrenInRange<InlineNode[]>( start, end, (acc, child, childStart, childEnd) => { acc.push(...child.format(tool, childStart, childEnd, data)); return acc; }, [] ); } /** * Iterates through children in range and calls callback for each * * @param start - range start char index * @param end - range end char index * @param callback - callback to apply on children * @param initialValue - initial accumulator value * @private */ #reduceChildrenInRange<Acc>( start: number, end: number, callback: (acc: Acc, child: InlineNode, start: number, end: number) => Acc, initialValue: Acc ): Acc { let result = initialValue; for (const child of this.children) { if (start < child.length && end > 0 && start < end) { result = callback(result, child, Math.max(start, 0), Math.min(child.length, end)); } start -= child.length; end -= child.length; } return result; } /** * Returns child by passed text index * * @param index - char index * @private */ #findChildByIndex(index: number): [child: InlineNode & ChildNode | null, offset: number] { let totalLength = 0; for (const child of this.children) { if (index <= child.length + totalLength) { return [child, totalLength]; } totalLength += child.length; } return [null, totalLength]; } }
src/entities/FormattingNode/index.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/interfaces/ParentNode.ts", "retrieved_chunk": " * Appends passed children to this node\n *\n * @param children - array of children to append\n */\n public append(...children: ChildNode[]): void {\n /**\n * If node is already a child of current node, remove it to append at the end\n */\n children.forEach(child => {\n const index = this.children.indexOf(child);", "score": 0.8542159795761108 }, { "filename": "src/entities/interfaces/ParentNode.ts", "retrieved_chunk": " *\n * @param target - target after which to insert new children\n * @param children - children nodes to insert\n */\n public insertAfter(target: ChildNode, ...children: ChildNode[]): void {\n /**\n * If node is already a child of current node, remove it to append at the end\n */\n children.forEach(child => {\n const index = this.children.indexOf(child);", "score": 0.8517643809318542 }, { "filename": "src/entities/interfaces/ParentNode.ts", "retrieved_chunk": " *\n * @param child - child to remove\n */\n public removeChild(child: ChildNode): void {\n const index = this.children.indexOf(child);\n this.children.splice(index, 1);\n child.remove();\n }\n /**\n * Inserts new children after specified target", "score": 0.8359620571136475 }, { "filename": "src/entities/TextNode/index.ts", "retrieved_chunk": " /**\n * Splits current node into two nodes by the specified index\n *\n * @param index - char index where to split\n * @returns {TextNode|null} - new node or null if split is not applicable\n */\n public split(index: number): TextNode | null {\n if (index === 0 || index === this.length) {\n return null;\n }", "score": 0.8263200521469116 }, { "filename": "src/entities/TextNode/index.ts", "retrieved_chunk": " const newNode = new TextNode();\n const text = this.removeText(index);\n newNode.insertText(text);\n this.parent?.insertAfter(this, newNode);\n return newNode;\n }\n /**\n * Validates index\n *\n * @param index - char index to validate", "score": 0.8123306632041931 } ]
typescript
...this.children.slice(midNodeIndex));
import { FormattingNodeConstructorParameters, InlineToolName, InlineToolData } from './types'; import { ChildNode, InlineFragment, InlineNode, InlineNodeSerialized, ParentNode } from '../interfaces'; export * from './types'; /** * We need to extend FormattingNode interface with ChildNode and ParentNode ones to use the methods from mixins */ export interface FormattingNode extends ChildNode, ParentNode {} /** * FormattingNode class represents a node in a tree-like structure, used to store and manipulate formatted text content */ @ParentNode @ChildNode export class FormattingNode implements InlineNode { /** * Private field representing the name of the formatting tool applied to the content */ #tool: InlineToolName; /** * Any additional data associated with the formatting tool */ #data?: InlineToolData; /** * Constructor for FormattingNode class. * * @param args - FormattingNode constructor arguments. * @param args.tool - The name of the formatting tool applied to the content. * @param args.data - Any additional data associated with the formatting. */ constructor({ tool, data }: FormattingNodeConstructorParameters) { this.#tool = tool; this.#data = data; } /** * Returns text value length of current node (including subtree) */ public get length(): number { return this.children.reduce((sum, child) => sum + child.length, 0); } /** * Returns serialized value of the node: text and formatting fragments */ public get serialized(): InlineNodeSerialized { return { text: this.getText(), fragments: this.getFragments(), }; } /** * Inserts text to the specified index, by default appends text to the end of the current value * * @param text - text to insert * @param [index] - char index where to insert text */ public insertText(text: string, index = this.length): void { const [child, offset] = this.#findChildByIndex(index); child?.insertText(text, index - offset); } /** * Removes text form the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value * @returns {string} removed text */ public removeText(start = 0, end = this.length): string { const result = this.#reduceChildrenInRange( start, end, (acc, child, childStart, childEnd) => { return acc + child.removeText(childStart, childEnd); }, '' ); if (this.length === 0) { this.remove(); } return result; } /** * Returns text from the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value */ public getText(start = 0, end = this.length): string { return this.#reduceChildrenInRange( start, end, (acc, child, childStart, childEnd) => { return acc + child.getText(childStart, childEnd); }, '' ); } /** * Returns inline fragments for subtree including current node from the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value */ public getFragments(start = 0, end = this.length): InlineFragment[] { return this.#reduceChildrenInRange<InlineFragment[]>( start, end, (acc, child, childStart, childEnd) => { /** * If child is not a FormattingNode, it doesn't include any fragments. So we skip it. 
*/ if (!(child instanceof FormattingNode)) { return acc; } acc.push(...child.getFragments(childStart, childEnd)); return acc; }, [ { tool: this.#tool, data: this.#data, range: [start, end], } ] ); } /** * Splits current node by the specified index * * @param index - char index where to split the node * @returns {FormattingNode | null} new node */ public split(index: number): FormattingNode | null { if (index === 0 || index === this.length) { return null; } const newNode = new FormattingNode({ tool: this.#tool, data: this.#data, }); const [child, offset] = this.#findChildByIndex(index); if (!child) { return null; } // Have to save length as it is changed after split const childLength = child.length; const splitNode = child.split(index - offset); let midNodeIndex = this.children.indexOf(child); /** * If node is split or if node is not split but index equals to child length, we should split children from the next node */ if (splitNode || (index - offset === childLength)) { midNodeIndex += 1; } newNode.append(...this.children.slice(midNodeIndex)); this.parent?.insertAfter(this, newNode); return newNode; } /** * Applies formatting to the text with specified inline tool in the specified range * * @param tool - name of inline tool to apply * @param start - char start index of the range * @param end - char end index of the range * @param [data] - inline tool data if applicable */ public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] { /** * In case current tool is the same as new one, do nothing */ if (tool === this.#tool) { return []; } return this.#reduceChildrenInRange<InlineNode[]>( start, end, (acc, child, childStart, childEnd) => { acc.push(...child.format(tool, childStart, childEnd, data)); return acc; }, [] ); } /** * Iterates through children in range and calls callback for each * * @param start - range start char index * @param end - range end char index * @param callback - callback to apply on children * @param initialValue - initial accumulator value * @private */ #reduceChildrenInRange<Acc>( start: number, end: number, callback: (acc: Acc, child: InlineNode, start: number, end: number) => Acc, initialValue: Acc ): Acc { let result = initialValue; for (const child of this.children) { if (
start < child.length && end > 0 && start < end) {
result = callback(result, child, Math.max(start, 0), Math.min(child.length, end)); } start -= child.length; end -= child.length; } return result; } /** * Returns child by passed text index * * @param index - char index * @private */ #findChildByIndex(index: number): [child: InlineNode & ChildNode | null, offset: number] { let totalLength = 0; for (const child of this.children) { if (index <= child.length + totalLength) { return [child, totalLength]; } totalLength += child.length; } return [null, totalLength]; } }
src/entities/FormattingNode/index.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/TextNode/index.ts", "retrieved_chunk": " *\n * @param start - start char index of the range\n * @param end - end char index of the range\n */\n #cloneContents(start: number, end: number): TextNode {\n return new TextNode({\n value: this.getText(start, end),\n });\n }\n}", "score": 0.8394131660461426 }, { "filename": "src/entities/TextNode/index.ts", "retrieved_chunk": " public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {\n this.#validateIndex(start);\n this.#validateIndex(end);\n const formattingNode = new FormattingNode({\n tool,\n data,\n });\n const fragments: ChildNode[] = [];\n /**\n * If start index is greater than 0, we need extract part of the text before the start index", "score": 0.8357772827148438 }, { "filename": "src/entities/interfaces/ParentNode.ts", "retrieved_chunk": " *\n * @param target - target after which to insert new children\n * @param children - children nodes to insert\n */\n public insertAfter(target: ChildNode, ...children: ChildNode[]): void {\n /**\n * If node is already a child of current node, remove it to append at the end\n */\n children.forEach(child => {\n const index = this.children.indexOf(child);", "score": 0.824077308177948 }, { "filename": "src/entities/interfaces/ParentNode.ts", "retrieved_chunk": " * Appends passed children to this node\n *\n * @param children - array of children to append\n */\n public append(...children: ChildNode[]): void {\n /**\n * If node is already a child of current node, remove it to append at the end\n */\n children.forEach(child => {\n const index = this.children.indexOf(child);", "score": 0.8194954991340637 }, { "filename": "src/entities/interfaces/InlineNode.ts", "retrieved_chunk": " * @param [start] - start char index of the range\n * @param [end] - end char index of the range\n * @param [data] - Inline Tool data if applicable\n * @returns {InlineNode[]} - array of nodes after applied formatting\n */\n format(name: InlineToolName, start?: number, end?: number, data?: InlineToolData): InlineNode[];\n /**\n * Inserts text at passed char index\n *\n * @param text - text to insert", "score": 0.8179930448532104 } ]
typescript
start < child.length && end > 0 && start < end) {
import { FormattingNode, InlineToolName, InlineToolData } from '../FormattingNode'; import { TextNodeConstructorParameters } from './types'; import { ChildNode, InlineNode, InlineNodeSerialized } from '../interfaces'; export * from './types'; export interface TextNode extends ChildNode {} /** * TextNode class represents a node in a tree-like structure, used to store and manipulate text content. */ @ChildNode export class TextNode implements InlineNode { /** * Private field representing the text content of the node */ #value: string; /** * Constructor for TextNode class * * @param args - TextNode constructor arguments. * @param args.value - Text content of the node. */ constructor({ value = '' }: TextNodeConstructorParameters = {}) { this.#value = value; } /** * Returns length of the text */ public get length(): number { return this.#value.length; } /** * Returns serialized value of the node */ public get serialized(): InlineNodeSerialized { return { text: this.getText(), // No fragments for text node fragments: [], }; } /** * Inserts text to specified position. By default, appends new text to the current value * * @param text - text to insert * @param [index] - char start index */ public insertText(text: string, index = this.length): void { this.#validateIndex(index); this.#value = this.#value.slice(0, index) + text + this.#value.slice(index); } /** * Remove text from specified range * * @param [start] - start char index of the range, 0 by default * @param [end] - end char index of the range, text length by default * @returns {string} removed text */ public removeText(start = 0, end = this.length): string { this.#validateIndex(start); this.#validateIndex(end); const removedValue = this.#value.slice(start, end); this.#value = this.#value.slice(0, start) + this.#value.slice(end); if (this.length === 0) { this.remove(); } return removedValue; } /** * Returns text value from the specified range * * @param [start] - start char index of the range, 0 by default * @param [end] - end char index of the range, text length by default */ public getText(start = 0, end = this.length): string { if (start > end) { // Stryker disable next-line StringLiteral throw new Error(`Start index ${start} should be less or equal than end index ${end}`); } this.#validateIndex(start); this.#validateIndex(end); return this.#value.slice(start, end); } /** * Applies inline tool for specified range * * @param tool - name of the tool to apply * @param start - start char index of the range * @param end - end char index of the range * @param [data] - inline tool data if applicable * @returns {InlineNode[]} - array of nodes after applied formatting */ public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] { this.#validateIndex(start); this.#validateIndex(end);
const formattingNode = new FormattingNode({
tool, data, }); const fragments: ChildNode[] = []; /** * If start index is greater than 0, we need extract part of the text before the start index */ if (start > 0) { fragments.push(this.#cloneContents(0, start)); } /** * Formatting is applied to the specified range */ const formattedFragment = this.#cloneContents(start, end); formattedFragment.appendTo(formattingNode); fragments.push(formattingNode); /** * If end index is less than the text length, we need to extract part of the text after the end index */ if (end < this.length) { fragments.push(this.#cloneContents(end, this.length)); } this.parent?.insertAfter(this, ...fragments); this.remove(); return fragments; } /** * Splits current node into two nodes by the specified index * * @param index - char index where to split * @returns {TextNode|null} - new node or null if split is not applicable */ public split(index: number): TextNode | null { if (index === 0 || index === this.length) { return null; } const newNode = new TextNode(); const text = this.removeText(index); newNode.insertText(text); this.parent?.insertAfter(this, newNode); return newNode; } /** * Validates index * * @param index - char index to validate * @throws Error if index is out of the text length */ #validateIndex(index: number): void { if (index < 0 || index > this.length) { // Stryker disable next-line StringLiteral throw new Error(`Index ${index} is not in valid range [0, ${this.length}]`); } } /** * Clones specified range to a new TextNode * * @param start - start char index of the range * @param end - end char index of the range */ #cloneContents(start: number, end: number): TextNode { return new TextNode({ value: this.getText(start, end), }); } }
src/entities/TextNode/index.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/interfaces/InlineNode.ts", "retrieved_chunk": " * @param [start] - start char index of the range\n * @param [end] - end char index of the range\n * @param [data] - Inline Tool data if applicable\n * @returns {InlineNode[]} - array of nodes after applied formatting\n */\n format(name: InlineToolName, start?: number, end?: number, data?: InlineToolData): InlineNode[];\n /**\n * Inserts text at passed char index\n *\n * @param text - text to insert", "score": 0.9566935300827026 }, { "filename": "src/entities/FormattingNode/index.ts", "retrieved_chunk": " * @param tool - name of inline tool to apply\n * @param start - char start index of the range\n * @param end - char end index of the range\n * @param [data] - inline tool data if applicable\n */\n public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {\n /**\n * In case current tool is the same as new one, do nothing\n */\n if (tool === this.#tool) {", "score": 0.9547206163406372 }, { "filename": "src/entities/interfaces/InlineNode.ts", "retrieved_chunk": " * Returns text value in passed range\n *\n * @param start - start char index of the range\n * @param end - end char index of the range\n */\n getText(start?: number, end?: number): string;\n /**\n * Applies inline formatting on the passed range\n *\n * @param name - name of Inline Tool to apply", "score": 0.8990285396575928 }, { "filename": "src/entities/FormattingNode/index.ts", "retrieved_chunk": " * @param args.data - Any additional data associated with the formatting.\n */\n constructor({ tool, data }: FormattingNodeConstructorParameters) {\n this.#tool = tool;\n this.#data = data;\n }\n /**\n * Returns text value length of current node (including subtree)\n */\n public get length(): number {", "score": 0.895380437374115 }, { "filename": "src/entities/interfaces/InlineNode.ts", "retrieved_chunk": " * @param [index] - char index where to insert text\n */\n insertText(text: string, index?: number): void;\n /**\n * Removes text from the passed range\n *\n * @param [start] - start char index of the range\n * @param [end] - утв char index of the range\n * @returns {string} removed text\n */", "score": 0.8837069272994995 } ]
typescript
const formattingNode = new FormattingNode({
import { EditorDocument } from '../EditorDocument'; import { BlockTune, BlockTuneName, BlockTuneSerialized } from '../BlockTune'; import { BlockNodeConstructorParameters, BlockNodeName, createBlockNodeName, DataKey, createDataKey, BlockNodeData, BlockNodeSerialized } from './types'; /** * BlockNode class represents a node in a tree-like structure used to store and manipulate Blocks in an editor document. * A BlockNode can contain one or more child nodes of type TextNode, ValueNode or FormattingNode. * It can also be associated with one or more BlockTunes, which can modify the behavior of the BlockNode. */ export class BlockNode { /** * Field representing a name of the Tool created this Block */ #name: BlockNodeName; /** * Field representing the content of the Block */ #data: BlockNodeData; /** * Field representing the parent EditorDocument of the BlockNode */ #parent: EditorDocument; /** * Private field representing the BlockTunes associated with the BlockNode */ #tunes: Record<BlockTuneName, BlockTune>; /** * Constructor for BlockNode class. * * @param args - TextNode constructor arguments. * @param args.name - The name of the BlockNode. * @param args.data - The content of the BlockNode. * @param args.parent - The parent EditorDocument of the BlockNode. * @param args.tunes - The BlockTunes associated with the BlockNode. */ constructor({ name, data, parent, tunes = {} }: BlockNodeConstructorParameters) { this.#name = name; this.#data = data; this.#parent = parent; this.#tunes = tunes; } /** * Returns serialized object representing the BlockNode */ public get serialized(): BlockNodeSerialized { const serializedData = Object .entries(this.#data) .reduce( (acc, [dataKey, value]) => { /** * If the value is an array, we need to serialize each node in the array * Value is an array if the BlockNode contains TextNodes and FormattingNodes * After serializing there will be InlineNodeSerialized object */ if (value instanceof Array) { acc[dataKey] = value.map((node) => node.serialized); return acc; } acc[dataKey] = value.serialized; return acc; }, {} as Record<string, unknown> ); const serializedTunes = Object .entries(this.#tunes) .reduce( (acc, [name, tune]) => { acc[name] = tune.serialized; return acc; },
{} as Record<string, BlockTuneSerialized> );
return { name: this.#name, data: serializedData, tunes: serializedTunes, }; } } export { BlockNodeName, createBlockNodeName, DataKey, createDataKey };
src/entities/BlockNode/index.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/BlockNode/BlockNode.spec.ts", "retrieved_chunk": " createBlockTuneName('align'),\n createBlockTuneName('font-size'),\n createBlockTuneName('font-weight'),\n ];\n const blockTunes = blockTunesNames.reduce((acc, name) => ({\n ...acc,\n [name]: new BlockTune({} as BlockTuneConstructorParameters),\n }), {});\n const spyArray = Object\n .values(blockTunes)", "score": 0.7994147539138794 }, { "filename": "src/entities/BlockTune/index.ts", "retrieved_chunk": " */\n public update(key: string, value: unknown): void {\n this.#data[key] = value;\n }\n /**\n * Returns serialized version of the BlockTune.\n */\n public get serialized(): BlockTuneSerialized {\n return {\n name: this.#name,", "score": 0.7829612493515015 }, { "filename": "src/entities/BlockTune/BlockTune.spec.ts", "retrieved_chunk": " // Act\n const tuneSerialized = tune.serialized;\n // Assert\n expect(tuneSerialized).toEqual(\n {\n name: 'styling',\n data: {\n background: 'transparent',\n },\n }", "score": 0.7708536386489868 }, { "filename": "src/utils/mocks/createBlockTuneMock.ts", "retrieved_chunk": " data?: Record<string, unknown>,\n}): BlockTune {\n return new BlockTune(\n {\n name: name || createBlockTuneName('aligning'),\n data: data || {},\n }\n );\n}", "score": 0.761613130569458 }, { "filename": "src/entities/BlockNode/BlockNode.spec.ts", "retrieved_chunk": " .map((blockTune) => {\n return jest.spyOn(blockTune as BlockTune, 'serialized', 'get');\n });\n const blockNode = new BlockNode({\n name: createBlockNodeName('paragraph'),\n data: {},\n parent: {} as EditorDocument,\n tunes: blockTunes,\n });\n blockNode.serialized;", "score": 0.7578418254852295 } ]
typescript
{} as Record<string, BlockTuneSerialized> );
import { BlockTuneConstructorParameters, BlockTuneName, BlockTuneSerialized, createBlockTuneName } from './types'; /** * BlockTune class represents a set of additional information associated with a BlockNode. * This information can be used by a BlockTool to modify the behavior of the BlockNode. */ export class BlockTune { /** * Private field representing the name of the tune */ #name: BlockTuneName; /** * Private field representing any additional data associated with the tune */ #data: Record<string, unknown>; /** * Constructor for BlockTune class. * * @param args - BlockTune constructor arguments. * @param args.name - The name of the tune. * @param args.data - Any additional data associated with the tune. */ constructor({ name, data }: BlockTuneConstructorParameters) { this.#name = name; this.#data = data; } /** * Updates data associated with the tune. * * @param key - The key of the data to update * @param value - The value to update the data with */ public update(key: string, value: unknown): void { this.#data[key] = value; } /** * Returns serialized version of the BlockTune. */ public get serialized(
): BlockTuneSerialized {
return { name: this.#name, data: this.#data, }; } } export { BlockTuneName, createBlockTuneName }; export type { BlockTuneSerialized };
src/entities/BlockTune/index.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/ValueNode/index.ts", "retrieved_chunk": " /**\n * Updates the data associated with this value node.\n *\n * @param value - The new value of this value node.\n */\n public update(value: ValueType): void {\n this.#value = value;\n }\n /**\n * Returns serialized data associated with this value node.", "score": 0.9030252695083618 }, { "filename": "src/entities/BlockTune/types/BlockTuneSerialized.ts", "retrieved_chunk": "/**\n * BlockTuneSerialized represents a serialized version of a BlockTune.\n */\nexport interface BlockTuneSerialized {\n /**\n * The name of the tune.\n * Serialized as a string.\n */\n name: string;\n /**", "score": 0.891826868057251 }, { "filename": "src/entities/BlockTune/types/BlockTuneConstructorParameters.ts", "retrieved_chunk": "import { BlockTuneName } from './BlockTuneName';\nexport interface BlockTuneConstructorParameters {\n /**\n * The name of the tune\n */\n name: BlockTuneName;\n /**\n * Any additional data associated with the tune\n */\n data: Record<string, unknown>;", "score": 0.884552538394928 }, { "filename": "src/entities/BlockNode/types/BlockNodeSerialized.ts", "retrieved_chunk": " * The content of the Block\n */\n data: Record<string, unknown>; // @todo replace unknown type with serialized root node and value node\n /**\n * Serialized BlockTunes associated with the BlockNode\n */\n tunes: Record<string, BlockTuneSerialized>;\n}", "score": 0.8829658031463623 }, { "filename": "src/entities/BlockNode/types/BlockNodeSerialized.ts", "retrieved_chunk": "import { BlockTuneSerialized } from '../../BlockTune';\n/**\n * Serialized version of the BlockNode\n */\nexport interface BlockNodeSerialized {\n /**\n * The name of the tool created a Block\n */\n name: string;\n /**", "score": 0.8726823329925537 } ]
typescript
): BlockTuneSerialized {
import { FormattingNode, InlineToolName, InlineToolData } from '../FormattingNode'; import { TextNodeConstructorParameters } from './types'; import { ChildNode, InlineNode, InlineNodeSerialized } from '../interfaces'; export * from './types'; export interface TextNode extends ChildNode {} /** * TextNode class represents a node in a tree-like structure, used to store and manipulate text content. */ @ChildNode export class TextNode implements InlineNode { /** * Private field representing the text content of the node */ #value: string; /** * Constructor for TextNode class * * @param args - TextNode constructor arguments. * @param args.value - Text content of the node. */ constructor({ value = '' }: TextNodeConstructorParameters = {}) { this.#value = value; } /** * Returns length of the text */ public get length(): number { return this.#value.length; } /** * Returns serialized value of the node */ public get serialized(): InlineNodeSerialized { return { text: this.getText(), // No fragments for text node fragments: [], }; } /** * Inserts text to specified position. By default, appends new text to the current value * * @param text - text to insert * @param [index] - char start index */ public insertText(text: string, index = this.length): void { this.#validateIndex(index); this.#value = this.#value.slice(0, index) + text + this.#value.slice(index); } /** * Remove text from specified range * * @param [start] - start char index of the range, 0 by default * @param [end] - end char index of the range, text length by default * @returns {string} removed text */ public removeText(start = 0, end = this.length): string { this.#validateIndex(start); this.#validateIndex(end); const removedValue = this.#value.slice(start, end); this.#value = this.#value.slice(0, start) + this.#value.slice(end); if (this.length === 0) { this.remove(); } return removedValue; } /** * Returns text value from the specified range * * @param [start] - start char index of the range, 0 by default * @param [end] - end char index of the range, text length by default */ public getText(start = 0, end = this.length): string { if (start > end) { // Stryker disable next-line StringLiteral throw new Error(`Start index ${start} should be less or equal than end index ${end}`); } this.#validateIndex(start); this.#validateIndex(end); return this.#value.slice(start, end); } /** * Applies inline tool for specified range * * @param tool - name of the tool to apply * @param start - start char index of the range * @param end - end char index of the range * @param [data] - inline tool data if applicable * @returns {InlineNode[]} - array of nodes after applied formatting */ public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] { this.#validateIndex(start); this.#validateIndex(end); const formattingNode = new FormattingNode({ tool, data, }); const fragments: ChildNode[] = []; /** * If start index is greater than 0, we need extract part of the text before the start index */ if (start > 0) { fragments.push(this.#cloneContents(0, start)); } /** * Formatting is applied to the specified range */ const formattedFragment = this.#cloneContents(start, end); formattedFragment.appendTo(formattingNode); fragments.push(formattingNode); /** * If end index is less than the text length, we need to extract part of the text after the end index */ if (end < this.length) { fragments.push(this.#cloneContents(end, this.length)); } this
.parent?.insertAfter(this, ...fragments);
this.remove(); return fragments; } /** * Splits current node into two nodes by the specified index * * @param index - char index where to split * @returns {TextNode|null} - new node or null if split is not applicable */ public split(index: number): TextNode | null { if (index === 0 || index === this.length) { return null; } const newNode = new TextNode(); const text = this.removeText(index); newNode.insertText(text); this.parent?.insertAfter(this, newNode); return newNode; } /** * Validates index * * @param index - char index to validate * @throws Error if index is out of the text length */ #validateIndex(index: number): void { if (index < 0 || index > this.length) { // Stryker disable next-line StringLiteral throw new Error(`Index ${index} is not in valid range [0, ${this.length}]`); } } /** * Clones specified range to a new TextNode * * @param start - start char index of the range * @param end - end char index of the range */ #cloneContents(start: number, end: number): TextNode { return new TextNode({ value: this.getText(start, end), }); } }
src/entities/TextNode/index.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/FormattingNode/index.ts", "retrieved_chunk": " if (splitNode || (index - offset === childLength)) {\n midNodeIndex += 1;\n }\n newNode.append(...this.children.slice(midNodeIndex));\n this.parent?.insertAfter(this, newNode);\n return newNode;\n }\n /**\n * Applies formatting to the text with specified inline tool in the specified range\n *", "score": 0.8587389588356018 }, { "filename": "src/entities/FormattingNode/index.ts", "retrieved_chunk": " }\n /**\n * Inserts text to the specified index, by default appends text to the end of the current value\n *\n * @param text - text to insert\n * @param [index] - char index where to insert text\n */\n public insertText(text: string, index = this.length): void {\n const [child, offset] = this.#findChildByIndex(index);\n child?.insertText(text, index - offset);", "score": 0.8302919268608093 }, { "filename": "src/entities/FormattingNode/index.ts", "retrieved_chunk": " start,\n end,\n (acc, child, childStart, childEnd) => {\n return acc + child.removeText(childStart, childEnd);\n },\n ''\n );\n if (this.length === 0) {\n this.remove();\n }", "score": 0.8232051730155945 }, { "filename": "src/entities/FormattingNode/index.ts", "retrieved_chunk": " * If child is not a FormattingNode, it doesn't include any fragments. So we skip it.\n */\n if (!(child instanceof FormattingNode)) {\n return acc;\n }\n acc.push(...child.getFragments(childStart, childEnd));\n return acc;\n },\n [ {\n tool: this.#tool,", "score": 0.8210924863815308 }, { "filename": "src/entities/FormattingNode/index.ts", "retrieved_chunk": " result = callback(result, child, Math.max(start, 0), Math.min(child.length, end));\n }\n start -= child.length;\n end -= child.length;\n }\n return result;\n }\n /**\n * Returns child by passed text index\n *", "score": 0.8086227178573608 } ]
typescript
.parent?.insertAfter(this, ...fragments);
import { EditorDocument } from '../EditorDocument'; import { BlockTune, BlockTuneName, BlockTuneSerialized } from '../BlockTune'; import { BlockNodeConstructorParameters, BlockNodeName, createBlockNodeName, DataKey, createDataKey, BlockNodeData, BlockNodeSerialized } from './types'; /** * BlockNode class represents a node in a tree-like structure used to store and manipulate Blocks in an editor document. * A BlockNode can contain one or more child nodes of type TextNode, ValueNode or FormattingNode. * It can also be associated with one or more BlockTunes, which can modify the behavior of the BlockNode. */ export class BlockNode { /** * Field representing a name of the Tool created this Block */ #name: BlockNodeName; /** * Field representing the content of the Block */ #data: BlockNodeData; /** * Field representing the parent EditorDocument of the BlockNode */ #parent: EditorDocument; /** * Private field representing the BlockTunes associated with the BlockNode */ #tunes: Record<BlockTuneName, BlockTune>; /** * Constructor for BlockNode class. * * @param args - TextNode constructor arguments. * @param args.name - The name of the BlockNode. * @param args.data - The content of the BlockNode. * @param args.parent - The parent EditorDocument of the BlockNode. * @param args.tunes - The BlockTunes associated with the BlockNode. */ constructor({ name, data, parent, tunes = {} }: BlockNodeConstructorParameters) { this.#name = name; this.#data = data; this.#parent = parent; this.#tunes = tunes; } /** * Returns serialized object representing the BlockNode */ public get serialized(): BlockNodeSerialized { const serializedData = Object .entries(this.#data) .reduce( (acc, [dataKey, value]) => { /** * If the value is an array, we need to serialize each node in the array * Value is an array if the BlockNode contains TextNodes and FormattingNodes * After serializing there will be InlineNodeSerialized object */ if (value instanceof Array) { acc[dataKey] = value.map((node) => node.serialized); return acc; } acc[dataKey] = value.serialized; return acc; }, {} as Record<string, unknown> ); const serializedTunes = Object .entries(this.#tunes) .reduce( (acc, [name, tune]) => {
acc[name] = tune.serialized;
return acc; }, {} as Record<string, BlockTuneSerialized> ); return { name: this.#name, data: serializedData, tunes: serializedTunes, }; } } export { BlockNodeName, createBlockNodeName, DataKey, createDataKey };
src/entities/BlockNode/index.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/BlockTune/index.ts", "retrieved_chunk": " */\n public update(key: string, value: unknown): void {\n this.#data[key] = value;\n }\n /**\n * Returns serialized version of the BlockTune.\n */\n public get serialized(): BlockTuneSerialized {\n return {\n name: this.#name,", "score": 0.7889516353607178 }, { "filename": "src/entities/BlockNode/BlockNode.spec.ts", "retrieved_chunk": " createBlockTuneName('align'),\n createBlockTuneName('font-size'),\n createBlockTuneName('font-weight'),\n ];\n const blockTunes = blockTunesNames.reduce((acc, name) => ({\n ...acc,\n [name]: new BlockTune({} as BlockTuneConstructorParameters),\n }), {});\n const spyArray = Object\n .values(blockTunes)", "score": 0.7812715768814087 }, { "filename": "src/entities/BlockTune/BlockTune.spec.ts", "retrieved_chunk": " // Act\n const tuneSerialized = tune.serialized;\n // Assert\n expect(tuneSerialized).toEqual(\n {\n name: 'styling',\n data: {\n background: 'transparent',\n },\n }", "score": 0.777783989906311 }, { "filename": "src/utils/mocks/createBlockTuneMock.ts", "retrieved_chunk": " data?: Record<string, unknown>,\n}): BlockTune {\n return new BlockTune(\n {\n name: name || createBlockTuneName('aligning'),\n data: data || {},\n }\n );\n}", "score": 0.7734702229499817 }, { "filename": "src/entities/BlockNode/BlockNode.spec.ts", "retrieved_chunk": " const blockNode = new BlockNode({\n name: blockNodeName,\n data: {},\n parent: {} as EditorDocument,\n });\n const serialized = blockNode.serialized;\n expect(serialized.name).toEqual(blockNodeName);\n });\n it('should call .serialized getter of all tunes associated with the BlockNode', () => {\n const blockTunesNames = [", "score": 0.7553431987762451 } ]
typescript
acc[name] = tune.serialized;
import { BlockNode } from './index'; import { createBlockNodeName, createDataKey } from './types'; import { BlockTune, createBlockTuneName } from '../BlockTune'; import { TextNode } from '../TextNode'; import { ValueNode } from '../ValueNode'; import type { EditorDocument } from '../EditorDocument'; import type { BlockTuneConstructorParameters } from '../BlockTune/types'; import type { TextNodeConstructorParameters } from '../TextNode'; import type { ValueNodeConstructorParameters } from '../ValueNode'; describe('BlockNode', () => { describe('.serialized', () => { beforeEach(() => { jest.mock('../BlockTune', () => ({ BlockTune: jest.fn().mockImplementation(() => ({}) as BlockTune), serialized: jest.fn(), })); jest.mock('../TextNode', () => ({ TextNode: jest.fn().mockImplementation(() => ({}) as TextNode), serialized: jest.fn(), })); jest.mock('../ValueNode', () => ({ ValueNode: jest.fn().mockImplementation(() => ({}) as ValueNode), serialized: jest.fn(), })); }); afterEach(() => { jest.clearAllMocks(); }); it('should return a name of a tool that created a BlockNode', () => { const blockNodeName = createBlockNodeName('paragraph'); const blockNode = new BlockNode({ name: blockNodeName, data: {}, parent: {} as EditorDocument, }); const serialized = blockNode.serialized; expect(serialized.name).toEqual(blockNodeName); }); it('should call .serialized getter of all tunes associated with the BlockNode', () => { const blockTunesNames = [ createBlockTuneName('align'), createBlockTuneName('font-size'), createBlockTuneName('font-weight'), ]; const blockTunes = blockTunesNames.reduce((acc, name) => ({ ...acc, [name]: new BlockTune({} as BlockTuneConstructorParameters), }), {}); const spyArray = Object .values(blockTunes) .map((blockTune) => { return jest.spyOn(blockTune as BlockTune, 'serialized', 'get'); }); const blockNode = new BlockNode({ name: createBlockNodeName('paragraph'), data: {}, parent: {} as EditorDocument, tunes: blockTunes, }); blockNode.serialized; spyArray.forEach((spy) => { expect(spy).toHaveBeenCalled(); }); }); it('should call .serialized getter of all child ValueNodes associated with the BlockNode', () => { const countOfValueNodes = 2; const valueNodes = [ ...Array(countOfValueNodes).keys() ] .reduce((acc, index) => ({ ...acc, [
createDataKey(`data-key-${index}c${index}d`)]: new ValueNode({} as ValueNodeConstructorParameters), }), {});
const spyArray = Object .values(valueNodes) .map((valueNode) => { return jest.spyOn(valueNode as ValueNode, 'serialized', 'get'); }); const blockNode = new BlockNode({ name: createBlockNodeName('paragraph'), data: { ...valueNodes, }, parent: {} as EditorDocument, }); blockNode.serialized; spyArray.forEach((spy) => { expect(spy).toHaveBeenCalled(); }); }); it('should call .serialized getter of all child TextNodes associated with the BlockNode', () => { const countOfTextNodes = 3; const textNodes = [ ...Array(countOfTextNodes).keys() ] .map(() => new TextNode({} as TextNodeConstructorParameters)); const spyArray = textNodes .map((textNode) => { return jest.spyOn(textNode, 'serialized', 'get'); }); const blockNode = new BlockNode({ name: createBlockNodeName('paragraph'), data: { [createDataKey('data-key-1a2b')]: textNodes, }, parent: {} as EditorDocument, }); blockNode.serialized; spyArray.forEach((spy) => { expect(spy).toHaveBeenCalled(); }); }); }); });
src/entities/BlockNode/BlockNode.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/ValueNode/ValueNode.spec.ts", "retrieved_chunk": "import { createValueNodeMock } from '../../utils/mocks/createValueNodeMock';\ndescribe('ValueNode', () => {\n describe('.update()', () => {\n it('should update existing data associated with this value node', () => {\n // Arrange\n const longitudeValueNode = createValueNodeMock({\n value: 23.123,\n });\n const updatedLongitude = 23.456;\n // Act", "score": 0.8069531917572021 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " describe('.serialized', () => {\n it('should return concatenated text of all fragments with fragments list describing formatting', () => {\n const result = node.serialized;\n expect(result).toEqual({\n text: childMock.getText() + anotherChildMock.getText(),\n fragments: [\n {\n tool,\n data,\n range: [0, node.length],", "score": 0.7941977977752686 }, { "filename": "src/entities/TextNode/TextNode.spec.ts", "retrieved_chunk": " node.split(index);\n expect(node.getText()).toEqual(initialText.slice(0, index));\n });\n it('should insert new node to the parent', () => {\n const newNode = node.split(index);\n expect(parentMock.insertAfter).toBeCalledWith(node, newNode);\n });\n });\n describe('.serialized', () => {\n it('should return text value and empty array of fragments', () => {", "score": 0.7918734550476074 }, { "filename": "src/entities/ValueNode/ValueNode.spec.ts", "retrieved_chunk": " longitudeValueNode.update(updatedLongitude);\n // Assert\n expect(longitudeValueNode.serialized).toBe(updatedLongitude);\n });\n });\n describe('.serialized', () => {\n it('should return the serialized data associated with this value node', () => {\n // Arrange\n const longitude = 23.123;\n const longitudeValueNode = createValueNodeMock({", "score": 0.7917789816856384 }, { "filename": "src/entities/BlockNode/index.ts", "retrieved_chunk": " public get serialized(): BlockNodeSerialized {\n const serializedData = Object\n .entries(this.#data)\n .reduce(\n (acc, [dataKey, value]) => {\n /**\n * If the value is an array, we need to serialize each node in the array\n * Value is an array if the BlockNode contains TextNodes and FormattingNodes\n * After serializing there will be InlineNodeSerialized object\n */", "score": 0.7917149066925049 } ]
typescript
createDataKey(`data-key-${index}c${index}d`)]: new ValueNode({} as ValueNodeConstructorParameters), }), {});
import { BlockNode } from './index'; import { createBlockNodeName, createDataKey } from './types'; import { BlockTune, createBlockTuneName } from '../BlockTune'; import { TextNode } from '../TextNode'; import { ValueNode } from '../ValueNode'; import type { EditorDocument } from '../EditorDocument'; import type { BlockTuneConstructorParameters } from '../BlockTune/types'; import type { TextNodeConstructorParameters } from '../TextNode'; import type { ValueNodeConstructorParameters } from '../ValueNode'; describe('BlockNode', () => { describe('.serialized', () => { beforeEach(() => { jest.mock('../BlockTune', () => ({ BlockTune: jest.fn().mockImplementation(() => ({}) as BlockTune), serialized: jest.fn(), })); jest.mock('../TextNode', () => ({ TextNode: jest.fn().mockImplementation(() => ({}) as TextNode), serialized: jest.fn(), })); jest.mock('../ValueNode', () => ({ ValueNode: jest.fn().mockImplementation(() => ({}) as ValueNode), serialized: jest.fn(), })); }); afterEach(() => { jest.clearAllMocks(); }); it('should return a name of a tool that created a BlockNode', () => { const blockNodeName = createBlockNodeName('paragraph'); const blockNode = new BlockNode({ name: blockNodeName, data: {}, parent: {} as EditorDocument, }); const serialized = blockNode.serialized; expect(serialized.name).toEqual(blockNodeName); }); it('should call .serialized getter of all tunes associated with the BlockNode', () => { const blockTunesNames = [ createBlockTuneName('align'), createBlockTuneName('font-size'), createBlockTuneName('font-weight'), ]; const blockTunes = blockTunesNames.reduce((acc, name) => ({ ...acc, [name]: new BlockTune({} as BlockTuneConstructorParameters), }), {}); const spyArray = Object .values(blockTunes) .map((blockTune) => { return jest.spyOn(
blockTune as BlockTune, 'serialized', 'get');
}); const blockNode = new BlockNode({ name: createBlockNodeName('paragraph'), data: {}, parent: {} as EditorDocument, tunes: blockTunes, }); blockNode.serialized; spyArray.forEach((spy) => { expect(spy).toHaveBeenCalled(); }); }); it('should call .serialized getter of all child ValueNodes associated with the BlockNode', () => { const countOfValueNodes = 2; const valueNodes = [ ...Array(countOfValueNodes).keys() ] .reduce((acc, index) => ({ ...acc, [createDataKey(`data-key-${index}c${index}d`)]: new ValueNode({} as ValueNodeConstructorParameters), }), {}); const spyArray = Object .values(valueNodes) .map((valueNode) => { return jest.spyOn(valueNode as ValueNode, 'serialized', 'get'); }); const blockNode = new BlockNode({ name: createBlockNodeName('paragraph'), data: { ...valueNodes, }, parent: {} as EditorDocument, }); blockNode.serialized; spyArray.forEach((spy) => { expect(spy).toHaveBeenCalled(); }); }); it('should call .serialized getter of all child TextNodes associated with the BlockNode', () => { const countOfTextNodes = 3; const textNodes = [ ...Array(countOfTextNodes).keys() ] .map(() => new TextNode({} as TextNodeConstructorParameters)); const spyArray = textNodes .map((textNode) => { return jest.spyOn(textNode, 'serialized', 'get'); }); const blockNode = new BlockNode({ name: createBlockNodeName('paragraph'), data: { [createDataKey('data-key-1a2b')]: textNodes, }, parent: {} as EditorDocument, }); blockNode.serialized; spyArray.forEach((spy) => { expect(spy).toHaveBeenCalled(); }); }); }); });
src/entities/BlockNode/BlockNode.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/BlockNode/index.ts", "retrieved_chunk": " .entries(this.#tunes)\n .reduce(\n (acc, [name, tune]) => {\n acc[name] = tune.serialized;\n return acc;\n },\n {} as Record<string, BlockTuneSerialized>\n );\n return {\n name: this.#name,", "score": 0.836607813835144 }, { "filename": "src/entities/BlockTune/BlockTune.spec.ts", "retrieved_chunk": " });\n describe('.serialized', () => {\n it('should return serialized version of the BlockTune', () => {\n // Arrange\n const tune = createBlockTuneMock({\n name: createBlockTuneName('styling'),\n data: {\n background: 'transparent',\n },\n });", "score": 0.824059784412384 }, { "filename": "src/entities/BlockTune/BlockTune.spec.ts", "retrieved_chunk": "import { createBlockTuneName } from './index';\nimport { createBlockTuneMock } from '../../utils/mocks/createBlockTuneMock';\ndescribe('BlockTune', () => {\n describe('.update()', () => {\n it('should add field to data object by key if it doesn\\'t exist', () => {\n // Arrange\n const blockTune = createBlockTuneMock({\n data: {},\n });\n // Act", "score": 0.8059436082839966 }, { "filename": "src/utils/mocks/createBlockTuneMock.ts", "retrieved_chunk": " data?: Record<string, unknown>,\n}): BlockTune {\n return new BlockTune(\n {\n name: name || createBlockTuneName('aligning'),\n data: data || {},\n }\n );\n}", "score": 0.7999228239059448 }, { "filename": "src/entities/BlockNode/index.ts", "retrieved_chunk": " if (value instanceof Array) {\n acc[dataKey] = value.map((node) => node.serialized);\n return acc;\n }\n acc[dataKey] = value.serialized;\n return acc;\n },\n {} as Record<string, unknown>\n );\n const serializedTunes = Object", "score": 0.7835323810577393 } ]
typescript
blockTune as BlockTune, 'serialized', 'get');
import { BlockNode } from './index'; import { createBlockNodeName, createDataKey } from './types'; import { BlockTune, createBlockTuneName } from '../BlockTune'; import { TextNode } from '../TextNode'; import { ValueNode } from '../ValueNode'; import type { EditorDocument } from '../EditorDocument'; import type { BlockTuneConstructorParameters } from '../BlockTune/types'; import type { TextNodeConstructorParameters } from '../TextNode'; import type { ValueNodeConstructorParameters } from '../ValueNode'; describe('BlockNode', () => { describe('.serialized', () => { beforeEach(() => { jest.mock('../BlockTune', () => ({ BlockTune: jest.fn().mockImplementation(() => ({}) as BlockTune), serialized: jest.fn(), })); jest.mock('../TextNode', () => ({ TextNode: jest.fn().mockImplementation(() => ({}) as TextNode), serialized: jest.fn(), })); jest.mock('../ValueNode', () => ({ ValueNode: jest.fn().mockImplementation(() => ({}) as ValueNode), serialized: jest.fn(), })); }); afterEach(() => { jest.clearAllMocks(); }); it('should return a name of a tool that created a BlockNode', () => { const blockNodeName = createBlockNodeName('paragraph'); const blockNode = new BlockNode({ name: blockNodeName, data: {}, parent: {} as EditorDocument, }); const serialized = blockNode.serialized; expect(serialized.name).toEqual(blockNodeName); }); it('should call .serialized getter of all tunes associated with the BlockNode', () => { const blockTunesNames = [ createBlockTuneName('align'), createBlockTuneName('font-size'), createBlockTuneName('font-weight'), ]; const blockTunes = blockTunesNames.reduce((acc, name) => ({ ...acc, [name]: new BlockTune({} as BlockTuneConstructorParameters), }), {}); const spyArray = Object .values(blockTunes) .map((blockTune) => { return jest.spyOn(blockTune as BlockTune, 'serialized', 'get'); }); const blockNode = new BlockNode({ name: createBlockNodeName('paragraph'), data: {}, parent: {} as EditorDocument, tunes: blockTunes, }); blockNode.serialized; spyArray.forEach((spy) => { expect(spy).toHaveBeenCalled(); }); }); it('should call .serialized getter of all child ValueNodes associated with the BlockNode', () => { const countOfValueNodes = 2; const valueNodes = [ ...Array(countOfValueNodes).keys() ] .reduce((acc, index) => ({ ...acc, [createDataKey(`data-key-${index}c${index}d`)]: new ValueNode({} as ValueNodeConstructorParameters), }), {}); const spyArray = Object .values(valueNodes) .map((valueNode) => { return jest.spyOn(valueNode as ValueNode, 'serialized', 'get'); }); const blockNode = new BlockNode({ name: createBlockNodeName('paragraph'), data: { ...valueNodes, }, parent: {} as EditorDocument, }); blockNode.serialized; spyArray.forEach((spy) => { expect(spy).toHaveBeenCalled(); }); }); it('should call .serialized getter of all child TextNodes associated with the BlockNode', () => { const countOfTextNodes = 3; const textNodes = [ ...Array(countOfTextNodes).keys() ]
.map(() => new TextNode({} as TextNodeConstructorParameters));
const spyArray = textNodes .map((textNode) => { return jest.spyOn(textNode, 'serialized', 'get'); }); const blockNode = new BlockNode({ name: createBlockNodeName('paragraph'), data: { [createDataKey('data-key-1a2b')]: textNodes, }, parent: {} as EditorDocument, }); blockNode.serialized; spyArray.forEach((spy) => { expect(spy).toHaveBeenCalled(); }); }); }); });
src/entities/BlockNode/BlockNode.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " removeSpy.mockRestore();\n lengthSpy.mockRestore();\n });\n });\n describe('.getText()', () => {\n const start = 3;\n const end = 5;\n it('should call getText() for the relevant child', () => {\n node.getText(start, end);\n expect(childMock.getText).toBeCalledWith(start, end);", "score": 0.8750369548797607 }, { "filename": "src/entities/TextNode/TextNode.spec.ts", "retrieved_chunk": " node.split(index);\n expect(node.getText()).toEqual(initialText.slice(0, index));\n });\n it('should insert new node to the parent', () => {\n const newNode = node.split(index);\n expect(parentMock.insertAfter).toBeCalledWith(node, newNode);\n });\n });\n describe('.serialized', () => {\n it('should return text value and empty array of fragments', () => {", "score": 0.8660053014755249 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " */\n it('should create new FormattingNode with children split from the original one', () => {\n const newNode = node.split(childMock.length);\n expect(newNode?.children).toEqual([ anotherChildMock ]);\n });\n it('should call split method of child containing the specified index', () => {\n node.split(index);\n expect(childMock.split).toBeCalledWith(index);\n });\n it('should insert new node to the parent', () => {", "score": 0.855080246925354 }, { "filename": "src/entities/TextNode/TextNode.spec.ts", "retrieved_chunk": " const formattingNode = fragments[1] as FormattingNode;\n expect(formattingNode.children[0].getText()).toEqual(initialText.slice(start, end));\n });\n it('should call parent\\'s insertAfter with new nodes', () => {\n const name = createInlineToolName('bold');\n const start = 5;\n const end = 8;\n const fragments = node.format(name, start, end);\n expect(parentMock.insertAfter).toBeCalledWith(node, ...fragments);\n });", "score": 0.8547112941741943 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " });\n });\n describe('.removeText()', () => {\n const start = 3;\n const end = 5;\n it('should remove text from appropriate child', () => {\n node.removeText(start, end);\n expect(childMock.removeText).toBeCalledWith(start, end);\n });\n it('should adjust indices by child offset', () => {", "score": 0.8518182635307312 } ]
typescript
.map(() => new TextNode({} as TextNodeConstructorParameters));
import { FormattingNode, InlineToolName, InlineToolData } from '../FormattingNode'; import { TextNodeConstructorParameters } from './types'; import { ChildNode, InlineNode, InlineNodeSerialized } from '../interfaces'; export * from './types'; export interface TextNode extends ChildNode {} /** * TextNode class represents a node in a tree-like structure, used to store and manipulate text content. */ @ChildNode export class TextNode implements InlineNode { /** * Private field representing the text content of the node */ #value: string; /** * Constructor for TextNode class * * @param args - TextNode constructor arguments. * @param args.value - Text content of the node. */ constructor({ value = '' }: TextNodeConstructorParameters = {}) { this.#value = value; } /** * Returns length of the text */ public get length(): number { return this.#value.length; } /** * Returns serialized value of the node */ public get serialized(): InlineNodeSerialized { return { text: this.getText(), // No fragments for text node fragments: [], }; } /** * Inserts text to specified position. By default, appends new text to the current value * * @param text - text to insert * @param [index] - char start index */ public insertText(text: string, index = this.length): void { this.#validateIndex(index); this.#value = this.#value.slice(0, index) + text + this.#value.slice(index); } /** * Remove text from specified range * * @param [start] - start char index of the range, 0 by default * @param [end] - end char index of the range, text length by default * @returns {string} removed text */ public removeText(start = 0, end = this.length): string { this.#validateIndex(start); this.#validateIndex(end); const removedValue = this.#value.slice(start, end); this.#value = this.#value.slice(0, start) + this.#value.slice(end); if (this.length === 0) { this.remove(); } return removedValue; } /** * Returns text value from the specified range * * @param [start] - start char index of the range, 0 by default * @param [end] - end char index of the range, text length by default */ public getText(start = 0, end = this.length): string { if (start > end) { // Stryker disable next-line StringLiteral throw new Error(`Start index ${start} should be less or equal than end index ${end}`); } this.#validateIndex(start); this.#validateIndex(end); return this.#value.slice(start, end); } /** * Applies inline tool for specified range * * @param tool - name of the tool to apply * @param start - start char index of the range * @param end - end char index of the range * @param [data] - inline tool data if applicable * @returns {InlineNode[]} - array of nodes after applied formatting */ public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] { this.#validateIndex(start); this.#validateIndex(end); const formattingNode = new FormattingNode({ tool, data, }); const fragments: ChildNode[] = []; /** * If start index is greater than 0, we need extract part of the text before the start index */ if (start > 0) { fragments.push(this.#cloneContents(0, start)); } /** * Formatting is applied to the specified range */ const formattedFragment = this.#cloneContents(start, end); formattedFragment.
appendTo(formattingNode);
fragments.push(formattingNode); /** * If end index is less than the text length, we need to extract part of the text after the end index */ if (end < this.length) { fragments.push(this.#cloneContents(end, this.length)); } this.parent?.insertAfter(this, ...fragments); this.remove(); return fragments; } /** * Splits current node into two nodes by the specified index * * @param index - char index where to split * @returns {TextNode|null} - new node or null if split is not applicable */ public split(index: number): TextNode | null { if (index === 0 || index === this.length) { return null; } const newNode = new TextNode(); const text = this.removeText(index); newNode.insertText(text); this.parent?.insertAfter(this, newNode); return newNode; } /** * Validates index * * @param index - char index to validate * @throws Error if index is out of the text length */ #validateIndex(index: number): void { if (index < 0 || index > this.length) { // Stryker disable next-line StringLiteral throw new Error(`Index ${index} is not in valid range [0, ${this.length}]`); } } /** * Clones specified range to a new TextNode * * @param start - start char index of the range * @param end - end char index of the range */ #cloneContents(start: number, end: number): TextNode { return new TextNode({ value: this.getText(start, end), }); } }
src/entities/TextNode/index.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/FormattingNode/index.ts", "retrieved_chunk": " * If child is not a FormattingNode, it doesn't include any fragments. So we skip it.\n */\n if (!(child instanceof FormattingNode)) {\n return acc;\n }\n acc.push(...child.getFragments(childStart, childEnd));\n return acc;\n },\n [ {\n tool: this.#tool,", "score": 0.8512294292449951 }, { "filename": "src/entities/FormattingNode/index.ts", "retrieved_chunk": " if (splitNode || (index - offset === childLength)) {\n midNodeIndex += 1;\n }\n newNode.append(...this.children.slice(midNodeIndex));\n this.parent?.insertAfter(this, newNode);\n return newNode;\n }\n /**\n * Applies formatting to the text with specified inline tool in the specified range\n *", "score": 0.8266111016273499 }, { "filename": "src/entities/FormattingNode/index.ts", "retrieved_chunk": " return this.children.reduce((sum, child) => sum + child.length, 0);\n }\n /**\n * Returns serialized value of the node: text and formatting fragments\n */\n public get serialized(): InlineNodeSerialized {\n return {\n text: this.getText(),\n fragments: this.getFragments(),\n };", "score": 0.8117595911026001 }, { "filename": "src/entities/FormattingNode/index.ts", "retrieved_chunk": " */\n public split(index: number): FormattingNode | null {\n if (index === 0 || index === this.length) {\n return null;\n }\n const newNode = new FormattingNode({\n tool: this.#tool,\n data: this.#data,\n });\n const [child, offset] = this.#findChildByIndex(index);", "score": 0.8072571754455566 }, { "filename": "src/entities/TextNode/TextNode.spec.ts", "retrieved_chunk": " });\n describe('.format()', () => {\n it('should return just one FormattingNode, if formatting full TextNode', () => {\n const name = createInlineToolName('bold');\n const fragments = node.format(name, 0, initialText.length);\n expect(fragments).toHaveLength(1);\n expect(fragments[0]).toBeInstanceOf(FormattingNode);\n });\n it('should return two fragments if formatting from the start, but not to the end', () => {\n const name = createInlineToolName('bold');", "score": 0.8056184649467468 } ]
typescript
appendTo(formattingNode);
import { createBlockTuneName } from './index'; import { createBlockTuneMock } from '../../utils/mocks/createBlockTuneMock'; describe('BlockTune', () => { describe('.update()', () => { it('should add field to data object by key if it doesn\'t exist', () => { // Arrange const blockTune = createBlockTuneMock({ data: {}, }); // Act blockTune.update('align', 'left'); // Assert expect(blockTune.serialized.data).toEqual({ align: 'left', }); }); it('should update field in data object by key', () => { // Arrange const blockTune = createBlockTuneMock({ data: { align: 'center', }, }); // Act blockTune.update('align', 'right'); // Assert expect(blockTune.serialized.data).toEqual({ align: 'right', }); }); }); describe('.serialized', () => { it('should return serialized version of the BlockTune', () => { // Arrange const tune = createBlockTuneMock({
name: createBlockTuneName('styling'), data: {
background: 'transparent', }, }); // Act const tuneSerialized = tune.serialized; // Assert expect(tuneSerialized).toEqual( { name: 'styling', data: { background: 'transparent', }, } ); }); }); });
src/entities/BlockTune/BlockTune.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/BlockNode/BlockNode.spec.ts", "retrieved_chunk": " jest.mock('../ValueNode', () => ({\n ValueNode: jest.fn().mockImplementation(() => ({}) as ValueNode),\n serialized: jest.fn(),\n }));\n });\n afterEach(() => {\n jest.clearAllMocks();\n });\n it('should return a name of a tool that created a BlockNode', () => {\n const blockNodeName = createBlockNodeName('paragraph');", "score": 0.8904741406440735 }, { "filename": "src/entities/BlockNode/BlockNode.spec.ts", "retrieved_chunk": " const blockNode = new BlockNode({\n name: blockNodeName,\n data: {},\n parent: {} as EditorDocument,\n });\n const serialized = blockNode.serialized;\n expect(serialized.name).toEqual(blockNodeName);\n });\n it('should call .serialized getter of all tunes associated with the BlockNode', () => {\n const blockTunesNames = [", "score": 0.8812741637229919 }, { "filename": "src/entities/EditorDocument/EditorDocument.spec.ts", "retrieved_chunk": " });\n describe('.addBlock()', () => {\n it('should add the block to the end of the document if index is not provided', () => {\n // Arrange\n const document = createEditorDocumentMockWithSomeBlocks();\n const block = createBlockNodeMock({\n parent: document,\n });\n // Act\n document.addBlock(block);", "score": 0.8665471076965332 }, { "filename": "src/entities/interfaces/ParentNode.spec.ts", "retrieved_chunk": " it('should append passed children to new parent', () => {\n const childMock = createChildMock();\n dummy = new Dummy({\n children: [ childMock ],\n });\n expect(childMock.appendTo).toBeCalledWith(dummy);\n });\n });\n describe('.children', () => {\n it('should return empty array by default', () => {", "score": 0.8636952638626099 }, { "filename": "src/entities/BlockNode/BlockNode.spec.ts", "retrieved_chunk": " .map((blockTune) => {\n return jest.spyOn(blockTune as BlockTune, 'serialized', 'get');\n });\n const blockNode = new BlockNode({\n name: createBlockNodeName('paragraph'),\n data: {},\n parent: {} as EditorDocument,\n tunes: blockTunes,\n });\n blockNode.serialized;", "score": 0.8576655387878418 } ]
typescript
name: createBlockTuneName('styling'), data: {
import { EditorDocument } from './index'; import { BlockNode } from '../BlockNode'; import { createBlockNodeMock } from '../../utils/mocks/createBlockNodeMock'; import { createEditorDocumentMock } from '../../utils/mocks/createEditorDocumentMock'; /** * Creates an EditorDocument object with some blocks for tests. */ function createEditorDocumentMockWithSomeBlocks(): EditorDocument { const document = createEditorDocumentMock(); const countOfBlocks = 3; for (let i = 0; i < countOfBlocks; i++) { const block = createBlockNodeMock({ parent: document, }); document.addBlock(block); } return document; } describe('EditorDocument', () => { describe('.length', () => { it('should return the number of blocks in the document', () => { // Arrange const blocksCount = 3; const document = new EditorDocument({ children: [], properties: { readOnly: false, }, }); for (let i = 0; i < blocksCount; i++) { const block = createBlockNodeMock({ parent: document, }); document.addBlock(block); } // Act const actual = document.length; // Assert expect(actual).toBe(blocksCount); }); }); describe('.addBlock()', () => { it('should add the block to the end of the document if index is not provided', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); const block = createBlockNodeMock({ parent: document, }); // Act document.addBlock(block); // Assert const lastBlock = document.
getBlock(document.length - 1);
expect(lastBlock).toBe(block); }); it('should add the block to the beginning of the document if index is 0', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); const block = createBlockNodeMock({ parent: document, }); // Act document.addBlock(block, 0); // Assert expect(document.getBlock(0)).toBe(block); }); it('should add the block to the specified index in the middle of the document', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); const block = createBlockNodeMock({ parent: document, }); // Act document.addBlock(block, 1); // Assert expect(document.getBlock(1)).toBe(block); }); it('should add the block to the end of the document if the index after the last element is passed', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); const block = createBlockNodeMock({ parent: document, }); // Act document.addBlock(block, document.length); // Assert const lastBlock = document.getBlock(document.length - 1); expect(lastBlock).toBe(block); }); it('should throw an error if index is less then 0', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); const block = createBlockNodeMock({ parent: document, }); // Act const action = (): void => document.addBlock(block, -1); // Assert expect(action).toThrowError('Index out of bounds'); }); it('should throw an error if index is greater then document length', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); const block = createBlockNodeMock({ parent: document, }); // Act const action = (): void => document.addBlock(block, document.length + 1); // Assert expect(action).toThrowError('Index out of bounds'); }); }); describe('.removeBlock()', () => { it('should remove the block from the beginning of the document if index 0 is passed', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); const block = document.getBlock(0); // Act document.removeBlock(0); // Assert expect(document.getBlock(0)).not.toBe(block); }); it('should remove the block from the specified index in the middle of the document', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); const block = document.getBlock(1); // Act document.removeBlock(1); // Assert expect(document.getBlock(1)).not.toBe(block); }); it('should remove the block from the end of the document if the last index is passed', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); const documentLengthBeforeRemove = document.length; // Act document.removeBlock(document.length - 1); // Assert expect(document.length).toBe(documentLengthBeforeRemove - 1); }); it('should throw an error if index is less then 0', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); // Act const action = (): void => document.removeBlock(-1); // Assert expect(action).toThrowError('Index out of bounds'); }); it('should throw an error if index is greater then document length', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); // Act const action = (): void => document.removeBlock(document.length); // Assert expect(action).toThrowError('Index out of bounds'); }); }); describe('.getBlock()', () => { it('should return the block from the specific index', () => { // Arrange const document = createEditorDocumentMock(); const countOfBlocks = 3; const blocks: BlockNode[] = []; for (let i = 0; i < countOfBlocks; i++) { const block = createBlockNodeMock({ parent: document, }); document.addBlock(block); 
blocks.push(block); } const index = 1; // Act const block = document.getBlock(index); // Assert expect(block).toBe(blocks[index]); }); it('should throw an error if index is less then 0', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); // Act const action = (): BlockNode => document.getBlock(-1); // Assert expect(action).toThrowError('Index out of bounds'); }); it('should throw an error if index is greater then document length', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); // Act const action = (): BlockNode => document.getBlock(document.length); // Assert expect(action).toThrowError('Index out of bounds'); }); }); });
src/entities/EditorDocument/EditorDocument.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/utils/mocks/createEditorDocumentMock.ts", "retrieved_chunk": "import { EditorDocument } from '../../entities/EditorDocument';\n/**\n * Creates an EditorDocument object for tests.\n */\nexport function createEditorDocumentMock(): EditorDocument {\n return new EditorDocument({\n children: [],\n properties: {\n readOnly: false,\n },", "score": 0.7976216077804565 }, { "filename": "src/entities/BlockNode/BlockNode.spec.ts", "retrieved_chunk": " .map((blockTune) => {\n return jest.spyOn(blockTune as BlockTune, 'serialized', 'get');\n });\n const blockNode = new BlockNode({\n name: createBlockNodeName('paragraph'),\n data: {},\n parent: {} as EditorDocument,\n tunes: blockTunes,\n });\n blockNode.serialized;", "score": 0.7891492247581482 }, { "filename": "src/entities/TextNode/TextNode.spec.ts", "retrieved_chunk": " append: jest.fn(),\n children: [],\n } as unknown as ParentNode;\n let node: TextNode;\n beforeEach(() => {\n node = new TextNode({\n value: initialText,\n parent: parentMock as FormattingNode,\n });\n });", "score": 0.7868227958679199 }, { "filename": "src/entities/EditorDocument/index.ts", "retrieved_chunk": " public addBlock(blockNode: BlockNode, index?: number): void {\n if (index === undefined) {\n this.#children.push(blockNode);\n return;\n }\n this.#checkIndexOutOfBounds(index);\n this.#children.splice(index, 0, blockNode);\n }\n /**\n * Removes a BlockNode from the EditorDocument at the specified index.", "score": 0.7833150625228882 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " const childMock = createChildMock('Some text here. ');\n const anotherChildMock = createChildMock('Another text here.');\n const tool = createInlineToolName('bold');\n const anotherTool = createInlineToolName('italic');\n const data = createInlineToolData({});\n let node: FormattingNode;\n beforeEach(() => {\n node = new FormattingNode({\n tool,\n data,", "score": 0.7796070575714111 } ]
typescript
getBlock(document.length - 1);
import { describe, it, expect, beforeEach, jest } from '@jest/globals'; import { ParentNode } from './ParentNode'; import type { ChildNode } from './ChildNode'; const createChildMock = (): ChildNode => { return { appendTo: jest.fn(), remove: jest.fn(), parent: null, } as unknown as ChildNode; }; interface Dummy extends ParentNode { } /** * */ @ParentNode class Dummy { /** * * @param _options - dummy options */ // eslint-disable-next-line @typescript-eslint/no-empty-function,@typescript-eslint/no-unused-vars,no-unused-vars constructor(_options?: unknown) {} } describe('ParentNode decorator', () => { let dummy: Dummy; beforeEach(() => { dummy = new Dummy(); jest.resetAllMocks(); }); it('should add removeChild method to the decorated class', () => { expect(dummy.removeChild).toBeInstanceOf(Function); }); it('should add append method to the decorated class', () => { expect(dummy.append).toBeInstanceOf(Function); }); it('should add insertAfter method to the decorated class', () => {
expect(dummy.insertAfter).toBeInstanceOf(Function);
}); describe('constructor', () => { it('should append passed children to new parent', () => { const childMock = createChildMock(); dummy = new Dummy({ children: [ childMock ], }); expect(childMock.appendTo).toBeCalledWith(dummy); }); }); describe('.children', () => { it('should return empty array by default', () => { expect(dummy.children).toEqual([]); }); it('should return children passed via constructor', () => { const childMock = createChildMock(); dummy = new Dummy({ children: [ childMock ], }); expect(dummy.children).toEqual([ childMock ]); }); }); describe('.append()', () => { it('should add child to the children array', () => { const childMock = createChildMock(); dummy.append(childMock); expect(dummy.children).toContain(childMock); }); it('should add several children to the children array', () => { const childMock = createChildMock(); const anotherChildMock = createChildMock(); dummy.append(childMock, anotherChildMock); expect(dummy.children).toEqual([childMock, anotherChildMock]); }); it('should move a child to the end of children array if it is already there', () => { const childMock = createChildMock(); const anotherChildMock = createChildMock(); const oneMoreChildMock = createChildMock(); dummy = new Dummy({ children: [childMock, anotherChildMock, oneMoreChildMock], }); dummy.append(anotherChildMock); expect(dummy.children).toEqual([childMock, oneMoreChildMock, anotherChildMock]); }); it('should preserve already existing children', () => { const childMock = createChildMock(); const anotherChildMock = createChildMock(); const oneMoreChildMock = createChildMock(); dummy = new Dummy({ children: [childMock, anotherChildMock], }); dummy.append(oneMoreChildMock); expect(dummy.children).toEqual([childMock, anotherChildMock, oneMoreChildMock]); }); }); describe('.insertAfter()', () => { it('should insert a child after passed target', () => { const childMock = createChildMock(); const anotherChildMock = createChildMock(); const childMockToInsert = createChildMock(); dummy = new Dummy({ children: [childMock, anotherChildMock], }); dummy.insertAfter(childMock, childMockToInsert); expect(dummy.children).toEqual([childMock, childMockToInsert, anotherChildMock]); }); it('should insert several children after passed target', () => { const childMock = createChildMock(); const anotherChildMock = createChildMock(); const childMockToInsert = createChildMock(); const anotherChildMockToInsert = createChildMock(); dummy = new Dummy({ children: [childMock, anotherChildMock], }); dummy.insertAfter(childMock, childMockToInsert, anotherChildMockToInsert); expect(dummy.children).toEqual([childMock, childMockToInsert, anotherChildMockToInsert, anotherChildMock]); }); it('should remove existing child and insert it to the new place', () => { const childMock = createChildMock(); const anotherChildMock = createChildMock(); const oneMoreChildMock = createChildMock(); const childMockToInsert = createChildMock(); dummy = new Dummy({ children: [childMock, anotherChildMock, oneMoreChildMock, childMockToInsert], }); dummy.insertAfter(anotherChildMock, childMockToInsert); expect(dummy.children).toEqual([childMock, anotherChildMock, childMockToInsert, oneMoreChildMock]); }); }); describe('.removeChild()', () => { it('should remove child from the children array', () => { const childMock = createChildMock(); dummy = new Dummy({ children: [ childMock ], }); dummy.removeChild(childMock); expect(dummy.children).toHaveLength(0); }); it('should call remove method of child', () => { const childMock = createChildMock(); dummy 
= new Dummy({ children: [ childMock ], }); dummy.removeChild(childMock); expect(childMock.remove).toBeCalled(); }); }); });
src/entities/interfaces/ParentNode.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/interfaces/ChildNode.spec.ts", "retrieved_chunk": " expect(parentMock.append).toBeCalledWith(dummy);\n });\n it('should add remove method to the decorated class', () => {\n expect(dummy.remove).toBeInstanceOf(Function);\n });\n it('should add appendTo method to the decorated class', () => {\n expect(dummy.appendTo).toBeInstanceOf(Function);\n });\n describe('.parent', () => {\n it('should return null by default', () => {", "score": 0.9625750184059143 }, { "filename": "src/entities/interfaces/ChildNode.spec.ts", "retrieved_chunk": " dummy = new Dummy({\n parent: parentMock,\n });\n });\n it('should call parent\\'s removeChild method', () => {\n dummy.remove();\n expect(parentMock.removeChild).toBeCalledWith(dummy);\n });\n it('should set node\\'s parent to null', () => {\n dummy.remove();", "score": 0.8928881287574768 }, { "filename": "src/entities/interfaces/ChildNode.spec.ts", "retrieved_chunk": " expect(dummy.parent).toBeNull();\n });\n });\n describe('.appendTo()', () => {\n beforeEach(() => {\n dummy = new Dummy();\n });\n it('should call parent\\'s append method on appendTo call', () => {\n dummy.appendTo(parentMock);\n expect(parentMock.append).toBeCalledWith(dummy);", "score": 0.8819748163223267 }, { "filename": "src/entities/interfaces/ChildNode.spec.ts", "retrieved_chunk": " });\n it('should set node\\'s parent on appendTo call', () => {\n dummy.appendTo(parentMock);\n expect(dummy.parent).toBe(parentMock);\n });\n it('should do nothing if parents are the same', () => {\n const dummyWithParent = new Dummy({\n parent: parentMock,\n });\n jest.resetAllMocks();", "score": 0.8724934458732605 }, { "filename": "src/entities/interfaces/ChildNode.spec.ts", "retrieved_chunk": " dummy = new Dummy();\n expect(dummy.parent).toBeNull();\n });\n it('should return parent passed via constructor', () => {\n dummy = new Dummy({ parent: parentMock });\n expect(dummy.parent).toEqual(parentMock);\n });\n });\n describe('.remove()', () => {\n beforeEach(() => {", "score": 0.8687845468521118 } ]
typescript
expect(dummy.insertAfter).toBeInstanceOf(Function);
import isGlob from 'is-glob' import micromatch from 'micromatch' import type { IsTargetFilterPath } from './interfaces/pathFilter' import { ERRORS } from './errors' const isStringPath = (pathFilter?: string | string[]): pathFilter is string => { return typeof pathFilter === 'string' && !isGlob(pathFilter) } const isGlobPath = (pattern?: string | string[]): pattern is string => { return typeof pattern === 'string' && isGlob(pattern) } const isMultiPath = ( pathFilter?: string | string[] ): pathFilter is string[] => { return Array.isArray(pathFilter) } const matchSingleStringPath = ( pathname: string, pathFilter?: string ): boolean => { if (!pathFilter) return false return pathname.indexOf(pathFilter) >= 0 } const matchMultiPath = (pathname: string, pathFilterList: string[]) => { return pathFilterList.some((pattern) => matchSingleStringPath(pathname, pattern) ) } const matchSingleGlobPath = ( pathname: string, pattern?: string | string[] ): boolean => { if (!pattern) return false const matches = micromatch([pathname], pattern) return matches && matches.length > 0 } const matchMultiGlobPath = (pathname: string, patterns?: string | string[]) => { return matchSingleGlobPath(pathname, patterns) } /** * checkout weather the path is target filter path */ const isTargetFilterPath: IsTargetFilterPath = ( pathname = '', { pathFilter, req } ) => { // custom path filter if (typeof pathFilter === 'function') { return pathFilter(pathname, req) } // single glob if (isGlobPath(pathFilter)) { return matchSingleGlobPath(pathname, pathFilter) } // single string if (isStringPath(pathFilter)) { return matchSingleStringPath(pathname, pathFilter) } // multi path if (isMultiPath(pathFilter)) { if (pathFilter.every(isStringPath)) { return matchMultiPath(pathname, pathFilter) } if ((pathFilter as string[]).every(isGlobPath)) { return matchMultiGlobPath(pathname, pathFilter) } throw
new Error(ERRORS.ERR_CONTEXT_MATCHER_INVALID_ARRAY) }
return true } export { isTargetFilterPath }
src/pathFilter.ts
yisibell-h3-proxy-46e4021
[ { "filename": "src/pathRewriter.ts", "retrieved_chunk": " if (typeof rewriteConfig === 'function') {\n return true\n } else if (isPlainObj(rewriteConfig)) {\n return Object.keys(rewriteConfig as object).length !== 0\n } else if (rewriteConfig === undefined || rewriteConfig === null) {\n return false\n } else {\n throw new Error(ERRORS.ERR_PATH_REWRITER_CONFIG)\n }\n}", "score": 0.7853502035140991 }, { "filename": "src/pathRewriter.ts", "retrieved_chunk": " return result\n }\n if (typeof rewriteConfig === 'function') {\n const customRewriteFn = rewriteConfig\n return customRewriteFn\n } else {\n rulesCache = parsePathRewriteRules(rewriteConfig as RewriteRecord, logger)\n return rewritePath\n }\n}", "score": 0.7334815263748169 }, { "filename": "src/pathRewriter.ts", "retrieved_chunk": " }\n return rules\n}\n/**\n * Create a path rewriter function\n */\nconst createPathRewriter: CreatePathRewriter = (rewriteConfig, logger) => {\n if (!isValidRewriteConfig(rewriteConfig)) {\n return\n }", "score": 0.7209751605987549 }, { "filename": "src/interfaces/pathFilter.ts", "retrieved_chunk": "import type { IncomingMessage } from 'http'\nexport type CustomPathFilter = (\n pathname: string,\n req: IncomingMessage\n) => boolean\nexport type PathFilterParams = string | string[] | CustomPathFilter\nexport type IsTargetFilterPath = (\n pathname: string | undefined,\n opts: {\n pathFilter?: PathFilterParams", "score": 0.7106785178184509 }, { "filename": "src/index.ts", "retrieved_chunk": " const { req } = event.node\n const path = getUrlPath(req.url, target)\n // generate proxy request options via default strategy\n const proxyRequestOptions = createProxyRequestOptions(event, finalOptions)\n if (isTargetFilterPath(path, { pathFilter, req })) {\n const pathRewriter = createPathRewriter(pathRewrite, logger)\n let rewritedPath = path\n if (pathRewriter) {\n rewritedPath = await pathRewriter(path, req)\n }", "score": 0.6952264308929443 } ]
typescript
new Error(ERRORS.ERR_CONTEXT_MATCHER_INVALID_ARRAY) }
import isGlob from 'is-glob' import micromatch from 'micromatch' import type { IsTargetFilterPath } from './interfaces/pathFilter' import { ERRORS } from './errors' const isStringPath = (pathFilter?: string | string[]): pathFilter is string => { return typeof pathFilter === 'string' && !isGlob(pathFilter) } const isGlobPath = (pattern?: string | string[]): pattern is string => { return typeof pattern === 'string' && isGlob(pattern) } const isMultiPath = ( pathFilter?: string | string[] ): pathFilter is string[] => { return Array.isArray(pathFilter) } const matchSingleStringPath = ( pathname: string, pathFilter?: string ): boolean => { if (!pathFilter) return false return pathname.indexOf(pathFilter) >= 0 } const matchMultiPath = (pathname: string, pathFilterList: string[]) => { return pathFilterList.some((pattern) => matchSingleStringPath(pathname, pattern) ) } const matchSingleGlobPath = ( pathname: string, pattern?: string | string[] ): boolean => { if (!pattern) return false const matches = micromatch([pathname], pattern) return matches && matches.length > 0 } const matchMultiGlobPath = (pathname: string, patterns?: string | string[]) => { return matchSingleGlobPath(pathname, patterns) } /** * checkout weather the path is target filter path */
const isTargetFilterPath: IsTargetFilterPath = ( pathname = '', { pathFilter, req }
) => { // custom path filter if (typeof pathFilter === 'function') { return pathFilter(pathname, req) } // single glob if (isGlobPath(pathFilter)) { return matchSingleGlobPath(pathname, pathFilter) } // single string if (isStringPath(pathFilter)) { return matchSingleStringPath(pathname, pathFilter) } // multi path if (isMultiPath(pathFilter)) { if (pathFilter.every(isStringPath)) { return matchMultiPath(pathname, pathFilter) } if ((pathFilter as string[]).every(isGlobPath)) { return matchMultiGlobPath(pathname, pathFilter) } throw new Error(ERRORS.ERR_CONTEXT_MATCHER_INVALID_ARRAY) } return true } export { isTargetFilterPath }
src/pathFilter.ts
yisibell-h3-proxy-46e4021
[ { "filename": "src/interfaces/pathFilter.ts", "retrieved_chunk": "import type { IncomingMessage } from 'http'\nexport type CustomPathFilter = (\n pathname: string,\n req: IncomingMessage\n) => boolean\nexport type PathFilterParams = string | string[] | CustomPathFilter\nexport type IsTargetFilterPath = (\n pathname: string | undefined,\n opts: {\n pathFilter?: PathFilterParams", "score": 0.8964834213256836 }, { "filename": "src/pathRewriter.ts", "retrieved_chunk": " }\n return rules\n}\n/**\n * Create a path rewriter function\n */\nconst createPathRewriter: CreatePathRewriter = (rewriteConfig, logger) => {\n if (!isValidRewriteConfig(rewriteConfig)) {\n return\n }", "score": 0.8653205037117004 }, { "filename": "src/pathRewriter.ts", "retrieved_chunk": " return result\n }\n if (typeof rewriteConfig === 'function') {\n const customRewriteFn = rewriteConfig\n return customRewriteFn\n } else {\n rulesCache = parsePathRewriteRules(rewriteConfig as RewriteRecord, logger)\n return rewritePath\n }\n}", "score": 0.8227139711380005 }, { "filename": "src/proxyRequestStrategy.ts", "retrieved_chunk": " }\n return `${hostname}`\n}\nconst createProxyRequestOptions = (\n event: H3Event,\n options: CreateProxyEventHandlerOptions\n): ProxyRequestOptions | undefined => {\n const { configureProxyRequest, changeOrigin, target } = options\n const defaultOptions: ProxyRequestOptions = {\n headers: {},", "score": 0.8195008039474487 }, { "filename": "src/index.ts", "retrieved_chunk": " const { req } = event.node\n const path = getUrlPath(req.url, target)\n // generate proxy request options via default strategy\n const proxyRequestOptions = createProxyRequestOptions(event, finalOptions)\n if (isTargetFilterPath(path, { pathFilter, req })) {\n const pathRewriter = createPathRewriter(pathRewrite, logger)\n let rewritedPath = path\n if (pathRewriter) {\n rewritedPath = await pathRewriter(path, req)\n }", "score": 0.8119611740112305 } ]
typescript
const isTargetFilterPath: IsTargetFilterPath = ( pathname = '', { pathFilter, req }
import { beforeEach, describe, expect, it } from '@jest/globals'; import { ParentNode } from '../interfaces'; import { createInlineToolData, createInlineToolName, FormattingNode } from './index'; import { TextNode } from '../TextNode'; const parentMock = { insertAfter: jest.fn(), removeChild: jest.fn(), append: jest.fn(), children: [], } as unknown as ParentNode; const createChildMock = (value: string): TextNode => ({ getText: jest.fn(() => value), appendTo: jest.fn(), insertText: jest.fn(), removeText: jest.fn(), split: jest.fn(() => null), format: jest.fn(() => [ new FormattingNode({ tool: createInlineToolName('tool') }) ]), length: value.length, } as unknown as TextNode); describe('FormattingNode', () => { const childMock = createChildMock('Some text here. '); const anotherChildMock = createChildMock('Another text here.'); const tool = createInlineToolName('bold'); const anotherTool = createInlineToolName('italic'); const data = createInlineToolData({}); let node: FormattingNode; beforeEach(() => { node = new FormattingNode({ tool, data, parent: parentMock as FormattingNode, children: [childMock, anotherChildMock], }); jest.clearAllMocks(); }); describe('.length', () => { it('should return sum of lengths of children', () => { expect(node.length).toEqual(childMock.length + anotherChildMock.length); }); }); describe('.serialized', () => { it('should return concatenated text of all fragments with fragments list describing formatting', () => { const result = node.serialized; expect(result).toEqual({ text: childMock.getText() + anotherChildMock.getText(), fragments: [ { tool, data, range: [0, node.length], }, ], }); }); }); describe('.insertText()', () => { const newText = 'new text'; const index = 3; it('should lead calling insertText() of the child with the passed index', () => { node.insertText(newText, index); expect(childMock
.insertText).toBeCalledWith(newText, index);
}); it('should adjust index by child offset', () => { const offset = childMock.length; node.insertText(newText, index + offset); expect(anotherChildMock.insertText).toBeCalledWith(newText, index); }); it('should append text to the last child by default', () => { node.insertText(newText); expect(anotherChildMock.insertText).toBeCalledWith(newText, anotherChildMock.length); }); }); describe('.removeText()', () => { const start = 3; const end = 5; it('should remove text from appropriate child', () => { node.removeText(start, end); expect(childMock.removeText).toBeCalledWith(start, end); }); it('should adjust indices by child offset', () => { const offset = childMock.length; node.removeText(offset + start, offset + end); expect(anotherChildMock.removeText).toBeCalledWith(start, end); }); it('should call removeText for each affected child', () => { const offset = childMock.length; node.removeText(start, offset + end); expect(childMock.removeText).toBeCalledWith(start, offset); expect(anotherChildMock.removeText).toBeCalledWith(0, end); }); it('should remove all text by default', () => { node.removeText(); expect(childMock.removeText).toBeCalledWith(0, childMock.length); expect(anotherChildMock.removeText).toBeCalledWith(0, anotherChildMock.length); }); it('should call remove() if length is 0 after removeText() call', () => { const removeSpy = jest.spyOn(node, 'remove'); const lengthSpy = jest.spyOn(node, 'length', 'get').mockImplementation(() => 0); node.removeText(); expect(removeSpy).toBeCalled(); removeSpy.mockRestore(); lengthSpy.mockRestore(); }); }); describe('.getText()', () => { const start = 3; const end = 5; it('should call getText() for the relevant child', () => { node.getText(start, end); expect(childMock.getText).toBeCalledWith(start, end); }); it('should adjust index by child offset', () => { const offset = childMock.length; node.getText(offset + start, offset + end); expect(anotherChildMock.getText).toBeCalledWith(start, end); }); it('should call getText for all relevant children', () => { const offset = childMock.length; node.getText(start, offset + end); expect(childMock.getText).toBeCalledWith(start, offset); expect(anotherChildMock.getText).toBeCalledWith(0, end); }); it('should return all text by default', () => { node.getText(); expect(childMock.getText).toBeCalledWith(0, childMock.length); expect(anotherChildMock.getText).toBeCalledWith(0, anotherChildMock.length); }); }); describe('.getFragments()', () => { /** * @todo */ it.todo('should return fragments for sub-tree'); it('should return node\'s fragment', () => { const fragments = node.getFragments(); expect(fragments).toEqual([ { tool, data, range: [0, node.length], }, ]); }); }); describe('.split()', () => { const index = 5; it('should not split (return null) if index is 0', () => { const newNode = node.split(0); expect(newNode).toBeNull(); }); it('should not split (return null) if index equals text length', () => { const newNode = node.split(node.length); expect(newNode).toBeNull(); }); it('should create new FormattingNode on split', () => { const newNode = node.split(index); expect(newNode).toBeInstanceOf(FormattingNode); }); /** * @todo check this and related cases with integration tests */ it('should create new FormattingNode with children split from the original one', () => { const newNode = node.split(childMock.length); expect(newNode?.children).toEqual([ anotherChildMock ]); }); it('should call split method of child containing the specified index', () => { node.split(index); 
expect(childMock.split).toBeCalledWith(index); }); it('should insert new node to the parent', () => { const newNode = node.split(index); expect(parentMock.insertAfter).toBeCalledWith(node, newNode); }); }); describe('.format()', () => { const start = 3; const end = 5; it('should apply formatting to the relevant child', () => { node.format(anotherTool, start, end); expect(childMock.format).toBeCalledWith(anotherTool, start, end, undefined); }); it('should adjust index by child offset', () => { const offset = childMock.length; node.format(anotherTool, offset + start, offset + end); expect(anotherChildMock.format).toBeCalledWith(anotherTool, start, end, undefined); }); it('should format all relevant children', () => { const offset = childMock.length; node.format(anotherTool, start, offset + end); expect(childMock.format).toBeCalledWith(anotherTool, start, offset, undefined); expect(anotherChildMock.format).toBeCalledWith(anotherTool, 0, end, undefined); }); it('should do nothing if same tool is being applied', () => { node.format(tool, start, end); expect(childMock.format).not.toBeCalled(); expect(anotherChildMock.format).not.toBeCalled(); }); it('should return empty array if same tool is being applied', () => { const result = node.format(tool, start, end); expect(result).toHaveLength(0); }); it('should return array of new formatting nodes', () => { const result = node.format(anotherTool, start, end); expect(result).toEqual(childMock.format(anotherTool, start, end)); }); }); });
src/entities/FormattingNode/FormattingNode.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/TextNode/TextNode.spec.ts", "retrieved_chunk": " node.split(index);\n expect(node.getText()).toEqual(initialText.slice(0, index));\n });\n it('should insert new node to the parent', () => {\n const newNode = node.split(index);\n expect(parentMock.insertAfter).toBeCalledWith(node, newNode);\n });\n });\n describe('.serialized', () => {\n it('should return text value and empty array of fragments', () => {", "score": 0.9206974506378174 }, { "filename": "src/entities/TextNode/TextNode.spec.ts", "retrieved_chunk": " it('should append text if not empty', () => {\n node.insertText(text);\n expect(node.getText()).toEqual(initialText + text);\n });\n it('should prepend text if index is 0 and node is not empty', () => {\n node.insertText(text, 0);\n expect(node.getText()).toEqual(text + initialText);\n });\n it('should insert text at index if not empty', () => {\n const index = 5;", "score": 0.9061757326126099 }, { "filename": "src/entities/TextNode/TextNode.spec.ts", "retrieved_chunk": " it('should have empty value by default', () => {\n node = new TextNode();\n expect(node.getText()).toEqual('');\n });\n describe('.insertText()', () => {\n it('should set text to value if node is empty', () => {\n node = new TextNode();\n node.insertText(text);\n expect(node.getText()).toEqual(text);\n });", "score": 0.905231237411499 }, { "filename": "src/entities/TextNode/TextNode.spec.ts", "retrieved_chunk": " const end = 5;\n expect(() => node.getText(start, end)).not.toThrowError();\n });\n });\n describe('.removeText()', () => {\n it('should remove all text by default', () => {\n node.removeText();\n expect(node.getText()).toEqual('');\n });\n it('should remove text from specified index', () => {", "score": 0.8996722102165222 }, { "filename": "src/entities/TextNode/TextNode.spec.ts", "retrieved_chunk": " node.insertText(text, index);\n expect(node.getText()).toEqual(initialText.slice(0, index) + text + initialText.slice(index));\n });\n it('should throw an error if index is less than 0', () => {\n const f = (): void => node.insertText(text, -1);\n expect(f).toThrowError();\n });\n it('should throw an error if index is greater than node length', () => {\n const f = (): void => node.insertText(text, initialText.length + 1);\n expect(f).toThrowError();", "score": 0.8948001861572266 } ]
typescript
.insertText).toBeCalledWith(newText, index);
import { EditorDocument } from './index'; import { BlockNode } from '../BlockNode'; import { createBlockNodeMock } from '../../utils/mocks/createBlockNodeMock'; import { createEditorDocumentMock } from '../../utils/mocks/createEditorDocumentMock'; /** * Creates an EditorDocument object with some blocks for tests. */ function createEditorDocumentMockWithSomeBlocks(): EditorDocument { const document = createEditorDocumentMock(); const countOfBlocks = 3; for (let i = 0; i < countOfBlocks; i++) { const block = createBlockNodeMock({ parent: document, }); document.addBlock(block); } return document; } describe('EditorDocument', () => { describe('.length', () => { it('should return the number of blocks in the document', () => { // Arrange const blocksCount = 3; const document = new EditorDocument({ children: [], properties: { readOnly: false, }, }); for (let i = 0; i < blocksCount; i++) { const block = createBlockNodeMock({ parent: document, }); document.addBlock(block); } // Act const actual = document.length; // Assert expect(actual).toBe(blocksCount); }); }); describe('.addBlock()', () => { it('should add the block to the end of the document if index is not provided', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); const block = createBlockNodeMock({ parent: document, }); // Act document.addBlock(block); // Assert const lastBlock = document.getBlock(document.length - 1); expect(lastBlock).toBe(block); }); it('should add the block to the beginning of the document if index is 0', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); const block = createBlockNodeMock({ parent: document, }); // Act document.addBlock(block, 0); // Assert expect(document.getBlock(0)).toBe(block); }); it('should add the block to the specified index in the middle of the document', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); const block = createBlockNodeMock({ parent: document, }); // Act document.addBlock(block, 1); // Assert expect(document.getBlock(1)).toBe(block); }); it('should add the block to the end of the document if the index after the last element is passed', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); const block = createBlockNodeMock({ parent: document, }); // Act document.addBlock(block, document.length); // Assert const lastBlock = document.getBlock(document.length - 1); expect(lastBlock).toBe(block); }); it('should throw an error if index is less then 0', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); const block = createBlockNodeMock({ parent: document, }); // Act const action = (): void => document.addBlock(block, -1); // Assert expect(action).toThrowError('Index out of bounds'); }); it('should throw an error if index is greater then document length', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); const block = createBlockNodeMock({ parent: document, }); // Act const action = (): void => document.addBlock(block, document.length + 1); // Assert expect(action).toThrowError('Index out of bounds'); }); }); describe('.removeBlock()', () => { it('should remove the block from the beginning of the document if index 0 is passed', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); const block = document.getBlock(0); // Act
document.removeBlock(0);
// Assert expect(document.getBlock(0)).not.toBe(block); }); it('should remove the block from the specified index in the middle of the document', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); const block = document.getBlock(1); // Act document.removeBlock(1); // Assert expect(document.getBlock(1)).not.toBe(block); }); it('should remove the block from the end of the document if the last index is passed', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); const documentLengthBeforeRemove = document.length; // Act document.removeBlock(document.length - 1); // Assert expect(document.length).toBe(documentLengthBeforeRemove - 1); }); it('should throw an error if index is less then 0', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); // Act const action = (): void => document.removeBlock(-1); // Assert expect(action).toThrowError('Index out of bounds'); }); it('should throw an error if index is greater then document length', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); // Act const action = (): void => document.removeBlock(document.length); // Assert expect(action).toThrowError('Index out of bounds'); }); }); describe('.getBlock()', () => { it('should return the block from the specific index', () => { // Arrange const document = createEditorDocumentMock(); const countOfBlocks = 3; const blocks: BlockNode[] = []; for (let i = 0; i < countOfBlocks; i++) { const block = createBlockNodeMock({ parent: document, }); document.addBlock(block); blocks.push(block); } const index = 1; // Act const block = document.getBlock(index); // Assert expect(block).toBe(blocks[index]); }); it('should throw an error if index is less then 0', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); // Act const action = (): BlockNode => document.getBlock(-1); // Assert expect(action).toThrowError('Index out of bounds'); }); it('should throw an error if index is greater then document length', () => { // Arrange const document = createEditorDocumentMockWithSomeBlocks(); // Act const action = (): BlockNode => document.getBlock(document.length); // Assert expect(action).toThrowError('Index out of bounds'); }); }); });
src/entities/EditorDocument/EditorDocument.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/TextNode/TextNode.spec.ts", "retrieved_chunk": " it('should throw an error if end is invalid index', () => {\n expect(() => node.getText(0, initialText.length + 1)).toThrowError();\n });\n it('should throw an error if end index is greater than start index', () => {\n const start = 5;\n const end = 3;\n expect(() => node.getText(start, end)).toThrowError();\n });\n it('should not throw an error if end index is equal to start index', () => {\n const start = 5;", "score": 0.8582810759544373 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " },\n ],\n });\n });\n });\n describe('.insertText()', () => {\n const newText = 'new text';\n const index = 3;\n it('should lead calling insertText() of the child with the passed index', () => {\n node.insertText(newText, index);", "score": 0.8570513129234314 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " });\n });\n describe('.removeText()', () => {\n const start = 3;\n const end = 5;\n it('should remove text from appropriate child', () => {\n node.removeText(start, end);\n expect(childMock.removeText).toBeCalledWith(start, end);\n });\n it('should adjust indices by child offset', () => {", "score": 0.8563235402107239 }, { "filename": "src/entities/TextNode/TextNode.spec.ts", "retrieved_chunk": " node.split(index);\n expect(node.getText()).toEqual(initialText.slice(0, index));\n });\n it('should insert new node to the parent', () => {\n const newNode = node.split(index);\n expect(parentMock.insertAfter).toBeCalledWith(node, newNode);\n });\n });\n describe('.serialized', () => {\n it('should return text value and empty array of fragments', () => {", "score": 0.8505130410194397 }, { "filename": "src/entities/BlockNode/BlockNode.spec.ts", "retrieved_chunk": " },\n parent: {} as EditorDocument,\n });\n blockNode.serialized;\n spyArray.forEach((spy) => {\n expect(spy).toHaveBeenCalled();\n });\n });\n it('should call .serialized getter of all child TextNodes associated with the BlockNode', () => {\n const countOfTextNodes = 3;", "score": 0.8457094430923462 } ]
typescript
document.removeBlock(0);
import { beforeEach, describe, expect, it } from '@jest/globals'; import { ParentNode } from '../interfaces'; import { createInlineToolData, createInlineToolName, FormattingNode } from './index'; import { TextNode } from '../TextNode'; const parentMock = { insertAfter: jest.fn(), removeChild: jest.fn(), append: jest.fn(), children: [], } as unknown as ParentNode; const createChildMock = (value: string): TextNode => ({ getText: jest.fn(() => value), appendTo: jest.fn(), insertText: jest.fn(), removeText: jest.fn(), split: jest.fn(() => null), format: jest.fn(() => [ new FormattingNode({ tool: createInlineToolName('tool') }) ]), length: value.length, } as unknown as TextNode); describe('FormattingNode', () => { const childMock = createChildMock('Some text here. '); const anotherChildMock = createChildMock('Another text here.'); const tool = createInlineToolName('bold'); const anotherTool = createInlineToolName('italic'); const data = createInlineToolData({}); let node: FormattingNode; beforeEach(() => { node = new FormattingNode({ tool, data, parent: parentMock as FormattingNode, children: [childMock, anotherChildMock], }); jest.clearAllMocks(); }); describe('.length', () => { it('should return sum of lengths of children', () => {
expect(node.length).toEqual(childMock.length + anotherChildMock.length);
}); }); describe('.serialized', () => { it('should return concatenated text of all fragments with fragments list describing formatting', () => { const result = node.serialized; expect(result).toEqual({ text: childMock.getText() + anotherChildMock.getText(), fragments: [ { tool, data, range: [0, node.length], }, ], }); }); }); describe('.insertText()', () => { const newText = 'new text'; const index = 3; it('should lead calling insertText() of the child with the passed index', () => { node.insertText(newText, index); expect(childMock.insertText).toBeCalledWith(newText, index); }); it('should adjust index by child offset', () => { const offset = childMock.length; node.insertText(newText, index + offset); expect(anotherChildMock.insertText).toBeCalledWith(newText, index); }); it('should append text to the last child by default', () => { node.insertText(newText); expect(anotherChildMock.insertText).toBeCalledWith(newText, anotherChildMock.length); }); }); describe('.removeText()', () => { const start = 3; const end = 5; it('should remove text from appropriate child', () => { node.removeText(start, end); expect(childMock.removeText).toBeCalledWith(start, end); }); it('should adjust indices by child offset', () => { const offset = childMock.length; node.removeText(offset + start, offset + end); expect(anotherChildMock.removeText).toBeCalledWith(start, end); }); it('should call removeText for each affected child', () => { const offset = childMock.length; node.removeText(start, offset + end); expect(childMock.removeText).toBeCalledWith(start, offset); expect(anotherChildMock.removeText).toBeCalledWith(0, end); }); it('should remove all text by default', () => { node.removeText(); expect(childMock.removeText).toBeCalledWith(0, childMock.length); expect(anotherChildMock.removeText).toBeCalledWith(0, anotherChildMock.length); }); it('should call remove() if length is 0 after removeText() call', () => { const removeSpy = jest.spyOn(node, 'remove'); const lengthSpy = jest.spyOn(node, 'length', 'get').mockImplementation(() => 0); node.removeText(); expect(removeSpy).toBeCalled(); removeSpy.mockRestore(); lengthSpy.mockRestore(); }); }); describe('.getText()', () => { const start = 3; const end = 5; it('should call getText() for the relevant child', () => { node.getText(start, end); expect(childMock.getText).toBeCalledWith(start, end); }); it('should adjust index by child offset', () => { const offset = childMock.length; node.getText(offset + start, offset + end); expect(anotherChildMock.getText).toBeCalledWith(start, end); }); it('should call getText for all relevant children', () => { const offset = childMock.length; node.getText(start, offset + end); expect(childMock.getText).toBeCalledWith(start, offset); expect(anotherChildMock.getText).toBeCalledWith(0, end); }); it('should return all text by default', () => { node.getText(); expect(childMock.getText).toBeCalledWith(0, childMock.length); expect(anotherChildMock.getText).toBeCalledWith(0, anotherChildMock.length); }); }); describe('.getFragments()', () => { /** * @todo */ it.todo('should return fragments for sub-tree'); it('should return node\'s fragment', () => { const fragments = node.getFragments(); expect(fragments).toEqual([ { tool, data, range: [0, node.length], }, ]); }); }); describe('.split()', () => { const index = 5; it('should not split (return null) if index is 0', () => { const newNode = node.split(0); expect(newNode).toBeNull(); }); it('should not split (return null) if index equals text length', () => { const newNode = 
node.split(node.length); expect(newNode).toBeNull(); }); it('should create new FormattingNode on split', () => { const newNode = node.split(index); expect(newNode).toBeInstanceOf(FormattingNode); }); /** * @todo check this and related cases with integration tests */ it('should create new FormattingNode with children split from the original one', () => { const newNode = node.split(childMock.length); expect(newNode?.children).toEqual([ anotherChildMock ]); }); it('should call split method of child containing the specified index', () => { node.split(index); expect(childMock.split).toBeCalledWith(index); }); it('should insert new node to the parent', () => { const newNode = node.split(index); expect(parentMock.insertAfter).toBeCalledWith(node, newNode); }); }); describe('.format()', () => { const start = 3; const end = 5; it('should apply formatting to the relevant child', () => { node.format(anotherTool, start, end); expect(childMock.format).toBeCalledWith(anotherTool, start, end, undefined); }); it('should adjust index by child offset', () => { const offset = childMock.length; node.format(anotherTool, offset + start, offset + end); expect(anotherChildMock.format).toBeCalledWith(anotherTool, start, end, undefined); }); it('should format all relevant children', () => { const offset = childMock.length; node.format(anotherTool, start, offset + end); expect(childMock.format).toBeCalledWith(anotherTool, start, offset, undefined); expect(anotherChildMock.format).toBeCalledWith(anotherTool, 0, end, undefined); }); it('should do nothing if same tool is being applied', () => { node.format(tool, start, end); expect(childMock.format).not.toBeCalled(); expect(anotherChildMock.format).not.toBeCalled(); }); it('should return empty array if same tool is being applied', () => { const result = node.format(tool, start, end); expect(result).toHaveLength(0); }); it('should return array of new formatting nodes', () => { const result = node.format(anotherTool, start, end); expect(result).toEqual(childMock.format(anotherTool, start, end)); }); }); });
src/entities/FormattingNode/FormattingNode.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/interfaces/ParentNode.spec.ts", "retrieved_chunk": " dummy = new Dummy({\n children: [ childMock ],\n });\n dummy.removeChild(childMock);\n expect(dummy.children).toHaveLength(0);\n });\n it('should call remove method of child', () => {\n const childMock = createChildMock();\n dummy = new Dummy({\n children: [ childMock ],", "score": 0.8820614814758301 }, { "filename": "src/entities/interfaces/ParentNode.spec.ts", "retrieved_chunk": " expect(dummy.children).toEqual([]);\n });\n it('should return children passed via constructor', () => {\n const childMock = createChildMock();\n dummy = new Dummy({\n children: [ childMock ],\n });\n expect(dummy.children).toEqual([ childMock ]);\n });\n });", "score": 0.8773285150527954 }, { "filename": "src/entities/interfaces/ChildNode.spec.ts", "retrieved_chunk": " });\n it('should set node\\'s parent on appendTo call', () => {\n dummy.appendTo(parentMock);\n expect(dummy.parent).toBe(parentMock);\n });\n it('should do nothing if parents are the same', () => {\n const dummyWithParent = new Dummy({\n parent: parentMock,\n });\n jest.resetAllMocks();", "score": 0.8753229379653931 }, { "filename": "src/entities/interfaces/ParentNode.spec.ts", "retrieved_chunk": " it('should append passed children to new parent', () => {\n const childMock = createChildMock();\n dummy = new Dummy({\n children: [ childMock ],\n });\n expect(childMock.appendTo).toBeCalledWith(dummy);\n });\n });\n describe('.children', () => {\n it('should return empty array by default', () => {", "score": 0.8697030544281006 }, { "filename": "src/entities/interfaces/ParentNode.spec.ts", "retrieved_chunk": " expect(dummy.children).toEqual([childMock, anotherChildMock, oneMoreChildMock]);\n });\n });\n describe('.insertAfter()', () => {\n it('should insert a child after passed target', () => {\n const childMock = createChildMock();\n const anotherChildMock = createChildMock();\n const childMockToInsert = createChildMock();\n dummy = new Dummy({\n children: [childMock, anotherChildMock],", "score": 0.8599786758422852 } ]
typescript
expect(node.length).toEqual(childMock.length + anotherChildMock.length);
import { beforeEach, describe, expect, it } from '@jest/globals'; import { ParentNode } from '../interfaces'; import { createInlineToolData, createInlineToolName, FormattingNode } from './index'; import { TextNode } from '../TextNode'; const parentMock = { insertAfter: jest.fn(), removeChild: jest.fn(), append: jest.fn(), children: [], } as unknown as ParentNode; const createChildMock = (value: string): TextNode => ({ getText: jest.fn(() => value), appendTo: jest.fn(), insertText: jest.fn(), removeText: jest.fn(), split: jest.fn(() => null), format: jest.fn(() => [ new FormattingNode({ tool: createInlineToolName('tool') }) ]), length: value.length, } as unknown as TextNode); describe('FormattingNode', () => { const childMock = createChildMock('Some text here. '); const anotherChildMock = createChildMock('Another text here.'); const tool = createInlineToolName('bold'); const anotherTool = createInlineToolName('italic'); const data = createInlineToolData({}); let node: FormattingNode; beforeEach(() => { node = new FormattingNode({ tool, data, parent: parentMock as FormattingNode, children: [childMock, anotherChildMock], }); jest.clearAllMocks(); }); describe('.length', () => { it('should return sum of lengths of children', () => { expect(node.length).toEqual(childMock.length + anotherChildMock.length); }); }); describe('.serialized', () => { it('should return concatenated text of all fragments with fragments list describing formatting', () => { const result = node.serialized; expect(result).toEqual({ text: childMock
.getText() + anotherChildMock.getText(), fragments: [ {
tool, data, range: [0, node.length], }, ], }); }); }); describe('.insertText()', () => { const newText = 'new text'; const index = 3; it('should lead calling insertText() of the child with the passed index', () => { node.insertText(newText, index); expect(childMock.insertText).toBeCalledWith(newText, index); }); it('should adjust index by child offset', () => { const offset = childMock.length; node.insertText(newText, index + offset); expect(anotherChildMock.insertText).toBeCalledWith(newText, index); }); it('should append text to the last child by default', () => { node.insertText(newText); expect(anotherChildMock.insertText).toBeCalledWith(newText, anotherChildMock.length); }); }); describe('.removeText()', () => { const start = 3; const end = 5; it('should remove text from appropriate child', () => { node.removeText(start, end); expect(childMock.removeText).toBeCalledWith(start, end); }); it('should adjust indices by child offset', () => { const offset = childMock.length; node.removeText(offset + start, offset + end); expect(anotherChildMock.removeText).toBeCalledWith(start, end); }); it('should call removeText for each affected child', () => { const offset = childMock.length; node.removeText(start, offset + end); expect(childMock.removeText).toBeCalledWith(start, offset); expect(anotherChildMock.removeText).toBeCalledWith(0, end); }); it('should remove all text by default', () => { node.removeText(); expect(childMock.removeText).toBeCalledWith(0, childMock.length); expect(anotherChildMock.removeText).toBeCalledWith(0, anotherChildMock.length); }); it('should call remove() if length is 0 after removeText() call', () => { const removeSpy = jest.spyOn(node, 'remove'); const lengthSpy = jest.spyOn(node, 'length', 'get').mockImplementation(() => 0); node.removeText(); expect(removeSpy).toBeCalled(); removeSpy.mockRestore(); lengthSpy.mockRestore(); }); }); describe('.getText()', () => { const start = 3; const end = 5; it('should call getText() for the relevant child', () => { node.getText(start, end); expect(childMock.getText).toBeCalledWith(start, end); }); it('should adjust index by child offset', () => { const offset = childMock.length; node.getText(offset + start, offset + end); expect(anotherChildMock.getText).toBeCalledWith(start, end); }); it('should call getText for all relevant children', () => { const offset = childMock.length; node.getText(start, offset + end); expect(childMock.getText).toBeCalledWith(start, offset); expect(anotherChildMock.getText).toBeCalledWith(0, end); }); it('should return all text by default', () => { node.getText(); expect(childMock.getText).toBeCalledWith(0, childMock.length); expect(anotherChildMock.getText).toBeCalledWith(0, anotherChildMock.length); }); }); describe('.getFragments()', () => { /** * @todo */ it.todo('should return fragments for sub-tree'); it('should return node\'s fragment', () => { const fragments = node.getFragments(); expect(fragments).toEqual([ { tool, data, range: [0, node.length], }, ]); }); }); describe('.split()', () => { const index = 5; it('should not split (return null) if index is 0', () => { const newNode = node.split(0); expect(newNode).toBeNull(); }); it('should not split (return null) if index equals text length', () => { const newNode = node.split(node.length); expect(newNode).toBeNull(); }); it('should create new FormattingNode on split', () => { const newNode = node.split(index); expect(newNode).toBeInstanceOf(FormattingNode); }); /** * @todo check this and related cases with integration tests */ it('should create 
new FormattingNode with children split from the original one', () => { const newNode = node.split(childMock.length); expect(newNode?.children).toEqual([ anotherChildMock ]); }); it('should call split method of child containing the specified index', () => { node.split(index); expect(childMock.split).toBeCalledWith(index); }); it('should insert new node to the parent', () => { const newNode = node.split(index); expect(parentMock.insertAfter).toBeCalledWith(node, newNode); }); }); describe('.format()', () => { const start = 3; const end = 5; it('should apply formatting to the relevant child', () => { node.format(anotherTool, start, end); expect(childMock.format).toBeCalledWith(anotherTool, start, end, undefined); }); it('should adjust index by child offset', () => { const offset = childMock.length; node.format(anotherTool, offset + start, offset + end); expect(anotherChildMock.format).toBeCalledWith(anotherTool, start, end, undefined); }); it('should format all relevant children', () => { const offset = childMock.length; node.format(anotherTool, start, offset + end); expect(childMock.format).toBeCalledWith(anotherTool, start, offset, undefined); expect(anotherChildMock.format).toBeCalledWith(anotherTool, 0, end, undefined); }); it('should do nothing if same tool is being applied', () => { node.format(tool, start, end); expect(childMock.format).not.toBeCalled(); expect(anotherChildMock.format).not.toBeCalled(); }); it('should return empty array if same tool is being applied', () => { const result = node.format(tool, start, end); expect(result).toHaveLength(0); }); it('should return array of new formatting nodes', () => { const result = node.format(anotherTool, start, end); expect(result).toEqual(childMock.format(anotherTool, start, end)); }); }); });
src/entities/FormattingNode/FormattingNode.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/TextNode/TextNode.spec.ts", "retrieved_chunk": " node.split(index);\n expect(node.getText()).toEqual(initialText.slice(0, index));\n });\n it('should insert new node to the parent', () => {\n const newNode = node.split(index);\n expect(parentMock.insertAfter).toBeCalledWith(node, newNode);\n });\n });\n describe('.serialized', () => {\n it('should return text value and empty array of fragments', () => {", "score": 0.8989676833152771 }, { "filename": "src/entities/TextNode/TextNode.spec.ts", "retrieved_chunk": " });\n describe('.format()', () => {\n it('should return just one FormattingNode, if formatting full TextNode', () => {\n const name = createInlineToolName('bold');\n const fragments = node.format(name, 0, initialText.length);\n expect(fragments).toHaveLength(1);\n expect(fragments[0]).toBeInstanceOf(FormattingNode);\n });\n it('should return two fragments if formatting from the start, but not to the end', () => {\n const name = createInlineToolName('bold');", "score": 0.8800868988037109 }, { "filename": "src/entities/BlockNode/BlockNode.spec.ts", "retrieved_chunk": " },\n parent: {} as EditorDocument,\n });\n blockNode.serialized;\n spyArray.forEach((spy) => {\n expect(spy).toHaveBeenCalled();\n });\n });\n it('should call .serialized getter of all child TextNodes associated with the BlockNode', () => {\n const countOfTextNodes = 3;", "score": 0.8658186197280884 }, { "filename": "src/entities/interfaces/ParentNode.spec.ts", "retrieved_chunk": " it('should append passed children to new parent', () => {\n const childMock = createChildMock();\n dummy = new Dummy({\n children: [ childMock ],\n });\n expect(childMock.appendTo).toBeCalledWith(dummy);\n });\n });\n describe('.children', () => {\n it('should return empty array by default', () => {", "score": 0.8641265630722046 }, { "filename": "src/entities/interfaces/ParentNode.spec.ts", "retrieved_chunk": " expect(dummy.children).toEqual([]);\n });\n it('should return children passed via constructor', () => {\n const childMock = createChildMock();\n dummy = new Dummy({\n children: [ childMock ],\n });\n expect(dummy.children).toEqual([ childMock ]);\n });\n });", "score": 0.8430029153823853 } ]
typescript
.getText() + anotherChildMock.getText(), fragments: [ {
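Editorial note (not one of the dataset rows): the row above targets FormattingNode's serialized getter, where the text is the concatenation of the children's text and a single fragment records the applied tool over the whole range. A minimal TypeScript sketch of that shape, with hypothetical names, for orientation only:

// Illustrative stand-in for the serialized shape the spec asserts; not the real FormattingNode API.
interface FragmentSketch {
  tool: string;
  data?: Record<string, unknown>;
  range: [number, number];
}

function serializeSketch(tool: string, data: Record<string, unknown>, childTexts: string[]): { text: string; fragments: FragmentSketch[] } {
  const text = childTexts.join(''); // children contribute text in order
  return {
    text,
    fragments: [ { tool, data, range: [0, text.length] } ], // one fragment spanning the whole node
  };
}

// serializeSketch('bold', {}, ['Some text here. ', 'Another text here.']).fragments[0].range // -> [0, 34]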
import { FormattingNodeConstructorParameters, InlineToolName, InlineToolData } from './types'; import { ChildNode, InlineFragment, InlineNode, InlineNodeSerialized, ParentNode } from '../interfaces'; export * from './types'; /** * We need to extend FormattingNode interface with ChildNode and ParentNode ones to use the methods from mixins */ export interface FormattingNode extends ChildNode, ParentNode {} /** * FormattingNode class represents a node in a tree-like structure, used to store and manipulate formatted text content */ @ParentNode @ChildNode export class FormattingNode implements InlineNode { /** * Private field representing the name of the formatting tool applied to the content */ #tool: InlineToolName; /** * Any additional data associated with the formatting tool */ #data?: InlineToolData; /** * Constructor for FormattingNode class. * * @param args - FormattingNode constructor arguments. * @param args.tool - The name of the formatting tool applied to the content. * @param args.data - Any additional data associated with the formatting. */ constructor({ tool, data }: FormattingNodeConstructorParameters) { this.#tool = tool; this.#data = data; } /** * Returns text value length of current node (including subtree) */ public get length(): number { return this.children.reduce((sum, child) => sum + child.length, 0); } /** * Returns serialized value of the node: text and formatting fragments */ public get serialized(): InlineNodeSerialized { return { text: this.getText(), fragments: this.getFragments(), }; } /** * Inserts text to the specified index, by default appends text to the end of the current value * * @param text - text to insert * @param [index] - char index where to insert text */ public insertText(text: string, index = this.length): void { const [child, offset] = this.#findChildByIndex(index); child?.insertText(text, index - offset); } /** * Removes text form the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value * @returns {string} removed text */ public removeText(start = 0, end = this.length): string { const result = this.#reduceChildrenInRange( start, end, (acc, child, childStart, childEnd) => { return acc + child.removeText(childStart, childEnd); }, '' ); if (this.length === 0) { this.remove(); } return result; } /** * Returns text from the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value */ public getText(start = 0, end = this.length): string { return this.#reduceChildrenInRange( start, end, (acc, child, childStart, childEnd) => { return acc + child.getText(childStart, childEnd); }, '' ); } /** * Returns inline fragments for subtree including current node from the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value */ public getFragments(start = 0, end = this.length): InlineFragment[] { return this.#reduceChildrenInRange<InlineFragment[]>( start, end, (acc, child, childStart, childEnd) => { /** * If child is not a FormattingNode, it doesn't include any fragments. So we skip it. 
*/ if (!(child instanceof FormattingNode)) { return acc; } acc.push(...child.getFragments(childStart, childEnd)); return acc; }, [ { tool: this.#tool, data: this.#data, range: [start, end], } ] ); } /** * Splits current node by the specified index * * @param index - char index where to split the node * @returns {FormattingNode | null} new node */ public split(index: number): FormattingNode | null { if (index === 0 || index === this.length) { return null; } const newNode = new FormattingNode({ tool: this.#tool, data: this.#data, }); const [child, offset] = this.#findChildByIndex(index); if (!child) { return null; } // Have to save length as it is changed after split const childLength = child.length; const splitNode = child.split(index - offset); let midNodeIndex = this.children.indexOf(child); /** * If node is split or if node is not split but index equals to child length, we should split children from the next node */ if (splitNode || (index - offset === childLength)) { midNodeIndex += 1; } newNode.append(...this.children.slice(midNodeIndex)); this.parent?.insertAfter(this, newNode); return newNode; } /** * Applies formatting to the text with specified inline tool in the specified range * * @param tool - name of inline tool to apply * @param start - char start index of the range * @param end - char end index of the range * @param [data] - inline tool data if applicable */ public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] { /** * In case current tool is the same as new one, do nothing */ if (tool === this.#tool) { return []; } return this.#reduceChildrenInRange<InlineNode[]>( start, end, (acc, child, childStart, childEnd) => { acc.push(...child.format(tool, childStart, childEnd, data)); return acc; }, [] ); } /** * Iterates through children in range and calls callback for each * * @param start - range start char index * @param end - range end char index * @param callback - callback to apply on children * @param initialValue - initial accumulator value * @private */ #reduceChildrenInRange<Acc>( start: number, end: number, callback: (acc: Acc, child: InlineNode, start: number, end: number) => Acc, initialValue: Acc ): Acc { let result = initialValue; for (const child of this.children) { if (start < child.
length && end > 0 && start < end) {
result = callback(result, child, Math.max(start, 0), Math.min(child.length, end)); } start -= child.length; end -= child.length; } return result; } /** * Returns child by passed text index * * @param index - char index * @private */ #findChildByIndex(index: number): [child: InlineNode & ChildNode | null, offset: number] { let totalLength = 0; for (const child of this.children) { if (index <= child.length + totalLength) { return [child, totalLength]; } totalLength += child.length; } return [null, totalLength]; } }
src/entities/FormattingNode/index.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/TextNode/index.ts", "retrieved_chunk": " *\n * @param start - start char index of the range\n * @param end - end char index of the range\n */\n #cloneContents(start: number, end: number): TextNode {\n return new TextNode({\n value: this.getText(start, end),\n });\n }\n}", "score": 0.8378757238388062 }, { "filename": "src/entities/TextNode/index.ts", "retrieved_chunk": " public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {\n this.#validateIndex(start);\n this.#validateIndex(end);\n const formattingNode = new FormattingNode({\n tool,\n data,\n });\n const fragments: ChildNode[] = [];\n /**\n * If start index is greater than 0, we need extract part of the text before the start index", "score": 0.8328096866607666 }, { "filename": "src/entities/interfaces/ParentNode.ts", "retrieved_chunk": " *\n * @param target - target after which to insert new children\n * @param children - children nodes to insert\n */\n public insertAfter(target: ChildNode, ...children: ChildNode[]): void {\n /**\n * If node is already a child of current node, remove it to append at the end\n */\n children.forEach(child => {\n const index = this.children.indexOf(child);", "score": 0.8219796419143677 }, { "filename": "src/entities/interfaces/ParentNode.ts", "retrieved_chunk": " * Appends passed children to this node\n *\n * @param children - array of children to append\n */\n public append(...children: ChildNode[]): void {\n /**\n * If node is already a child of current node, remove it to append at the end\n */\n children.forEach(child => {\n const index = this.children.indexOf(child);", "score": 0.8176668882369995 }, { "filename": "src/entities/interfaces/InlineNode.ts", "retrieved_chunk": " * @param [start] - start char index of the range\n * @param [end] - end char index of the range\n * @param [data] - Inline Tool data if applicable\n * @returns {InlineNode[]} - array of nodes after applied formatting\n */\n format(name: InlineToolName, start?: number, end?: number, data?: InlineToolData): InlineNode[];\n /**\n * Inserts text at passed char index\n *\n * @param text - text to insert", "score": 0.8176589012145996 } ]
typescript
length && end > 0 && start < end) {
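Editorial note (not one of the dataset rows): the ground-truth snippet in this row sits inside the #reduceChildrenInRange loop, which clips the [start, end) window to each child and then shifts it left by the child's length before moving to the next one. A self-contained sketch of just that walking logic, with simplified stand-in types rather than the repository's actual method:

// Visits only the children a [start, end) range touches, handing each one child-local indices.
interface Measurable { length: number }

function reduceInRangeSketch<Acc, C extends Measurable>(
  children: C[],
  start: number,
  end: number,
  callback: (acc: Acc, child: C, childStart: number, childEnd: number) => Acc,
  initial: Acc
): Acc {
  let acc = initial;
  for (const child of children) {
    // A child is affected only while the window still overlaps [0, child.length)
    if (start < child.length && end > 0 && start < end) {
      acc = callback(acc, child, Math.max(start, 0), Math.min(child.length, end));
    }
    // Re-express the window in the next child's local coordinates
    start -= child.length;
    end -= child.length;
  }
  return acc;
}

// reduceInRangeSketch([{ length: 16 }, { length: 18 }], 3, 21, (sum, _c, s, e) => sum + (e - s), 0) // -> 18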
import { beforeEach, describe, expect, it } from '@jest/globals'; import { ParentNode } from '../interfaces'; import { createInlineToolData, createInlineToolName, FormattingNode } from './index'; import { TextNode } from '../TextNode'; const parentMock = { insertAfter: jest.fn(), removeChild: jest.fn(), append: jest.fn(), children: [], } as unknown as ParentNode; const createChildMock = (value: string): TextNode => ({ getText: jest.fn(() => value), appendTo: jest.fn(), insertText: jest.fn(), removeText: jest.fn(), split: jest.fn(() => null), format: jest.fn(() => [ new FormattingNode({ tool: createInlineToolName('tool') }) ]), length: value.length, } as unknown as TextNode); describe('FormattingNode', () => { const childMock = createChildMock('Some text here. '); const anotherChildMock = createChildMock('Another text here.'); const tool = createInlineToolName('bold'); const anotherTool = createInlineToolName('italic'); const data = createInlineToolData({}); let node: FormattingNode; beforeEach(() => { node = new FormattingNode({ tool, data, parent: parentMock as FormattingNode, children: [childMock, anotherChildMock], }); jest.clearAllMocks(); }); describe('.length', () => { it('should return sum of lengths of children', () => { expect(node.length).toEqual(childMock.length + anotherChildMock.length); }); }); describe('.serialized', () => { it('should return concatenated text of all fragments with fragments list describing formatting', () => { const result = node.serialized; expect(result).toEqual({ text: childMock.getText() + anotherChildMock.getText(), fragments: [ { tool, data, range: [0, node.length], }, ], }); }); }); describe('.insertText()', () => { const newText = 'new text'; const index = 3; it('should lead calling insertText() of the child with the passed index', () => { node.insertText(newText, index); expect(childMock.insertText).toBeCalledWith(newText, index); }); it('should adjust index by child offset', () => { const offset = childMock.length; node.insertText(newText, index + offset); expect(anotherChildMock.insertText).toBeCalledWith(newText, index); }); it('should append text to the last child by default', () => { node.insertText(newText); expect(anotherChildMock.insertText).toBeCalledWith(newText, anotherChildMock.length); }); }); describe('.removeText()', () => { const start = 3; const end = 5; it('should remove text from appropriate child', () => { node.removeText(start, end); expect(childMock.removeText).toBeCalledWith(start, end); }); it('should adjust indices by child offset', () => { const offset = childMock.length; node.removeText(offset + start, offset + end); expect(anotherChildMock.removeText).toBeCalledWith(start, end); }); it('should call removeText for each affected child', () => { const offset = childMock.length; node.removeText(start, offset + end); expect(childMock.removeText).toBeCalledWith(start, offset); expect(anotherChildMock.removeText).toBeCalledWith(0, end); }); it('should remove all text by default', () => { node.removeText(); expect(childMock.removeText).toBeCalledWith(0, childMock.length); expect(anotherChildMock.removeText).toBeCalledWith(0, anotherChildMock.length); }); it('should call remove() if length is 0 after removeText() call', () => { const removeSpy = jest.spyOn(node, 'remove'); const lengthSpy = jest.spyOn(node, 'length', 'get').mockImplementation(() => 0); node.removeText(); expect(removeSpy).toBeCalled(); removeSpy.mockRestore(); lengthSpy.mockRestore(); }); }); describe('.getText()', () => { const start = 3; const end = 5; 
it('should call getText() for the relevant child', () => { node.getText(start, end); expect(childMock.getText).toBeCalledWith(start, end); }); it('should adjust index by child offset', () => { const offset = childMock.length; node.getText(offset + start, offset + end); expect(anotherChildMock.getText).toBeCalledWith(start, end); }); it('should call getText for all relevant children', () => { const offset = childMock.length; node.getText(start, offset + end); expect(childMock.getText).toBeCalledWith(start, offset); expect(anotherChildMock.getText).toBeCalledWith(0, end); }); it('should return all text by default', () => { node.getText(); expect(childMock.getText).toBeCalledWith(0, childMock.length); expect(anotherChildMock.getText).toBeCalledWith(0, anotherChildMock.length); }); }); describe('.getFragments()', () => { /** * @todo */ it.todo('should return fragments for sub-tree'); it('should return node\'s fragment', () => { const fragments = node.getFragments(); expect(fragments).toEqual([ { tool, data, range: [0, node.length], }, ]); }); }); describe('.split()', () => { const index = 5; it('should not split (return null) if index is 0', () => { const newNode = node.split(0); expect(newNode).toBeNull(); }); it('should not split (return null) if index equals text length', () => { const newNode = node.split(node.length); expect(newNode).toBeNull(); }); it('should create new FormattingNode on split', () => { const newNode = node.split(index); expect(newNode).toBeInstanceOf(FormattingNode); }); /** * @todo check this and related cases with integration tests */ it('should create new FormattingNode with children split from the original one', () => { const newNode = node.split(childMock.length); expect(newNode?.children).toEqual([ anotherChildMock ]); }); it('should call split method of child containing the specified index', () => { node.split(index); expect(childMock.split).toBeCalledWith(index); }); it('should insert new node to the parent', () => { const newNode = node.split(index); expect(parentMock.insertAfter).toBeCalledWith(node, newNode); }); }); describe('.format()', () => { const start = 3; const end = 5; it('should apply formatting to the relevant child', () => { node.format(anotherTool, start, end); expect(
childMock.format).toBeCalledWith(anotherTool, start, end, undefined);
}); it('should adjust index by child offset', () => { const offset = childMock.length; node.format(anotherTool, offset + start, offset + end); expect(anotherChildMock.format).toBeCalledWith(anotherTool, start, end, undefined); }); it('should format all relevant children', () => { const offset = childMock.length; node.format(anotherTool, start, offset + end); expect(childMock.format).toBeCalledWith(anotherTool, start, offset, undefined); expect(anotherChildMock.format).toBeCalledWith(anotherTool, 0, end, undefined); }); it('should do nothing if same tool is being applied', () => { node.format(tool, start, end); expect(childMock.format).not.toBeCalled(); expect(anotherChildMock.format).not.toBeCalled(); }); it('should return empty array if same tool is being applied', () => { const result = node.format(tool, start, end); expect(result).toHaveLength(0); }); it('should return array of new formatting nodes', () => { const result = node.format(anotherTool, start, end); expect(result).toEqual(childMock.format(anotherTool, start, end)); }); }); });
src/entities/FormattingNode/FormattingNode.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/TextNode/TextNode.spec.ts", "retrieved_chunk": " const formattingNode = fragments[1] as FormattingNode;\n expect(formattingNode.children[0].getText()).toEqual(initialText.slice(start, end));\n });\n it('should call parent\\'s insertAfter with new nodes', () => {\n const name = createInlineToolName('bold');\n const start = 5;\n const end = 8;\n const fragments = node.format(name, start, end);\n expect(parentMock.insertAfter).toBeCalledWith(node, ...fragments);\n });", "score": 0.9124166369438171 }, { "filename": "src/entities/TextNode/TextNode.spec.ts", "retrieved_chunk": " node.split(index);\n expect(node.getText()).toEqual(initialText.slice(0, index));\n });\n it('should insert new node to the parent', () => {\n const newNode = node.split(index);\n expect(parentMock.insertAfter).toBeCalledWith(node, newNode);\n });\n });\n describe('.serialized', () => {\n it('should return text value and empty array of fragments', () => {", "score": 0.902079701423645 }, { "filename": "src/entities/TextNode/TextNode.spec.ts", "retrieved_chunk": " const end = 5;\n expect(() => node.getText(start, end)).not.toThrowError();\n });\n });\n describe('.removeText()', () => {\n it('should remove all text by default', () => {\n node.removeText();\n expect(node.getText()).toEqual('');\n });\n it('should remove text from specified index', () => {", "score": 0.8745898604393005 }, { "filename": "src/entities/BlockNode/BlockNode.spec.ts", "retrieved_chunk": " },\n parent: {} as EditorDocument,\n });\n blockNode.serialized;\n spyArray.forEach((spy) => {\n expect(spy).toHaveBeenCalled();\n });\n });\n it('should call .serialized getter of all child TextNodes associated with the BlockNode', () => {\n const countOfTextNodes = 3;", "score": 0.8727271556854248 }, { "filename": "src/entities/interfaces/ParentNode.spec.ts", "retrieved_chunk": " it('should append passed children to new parent', () => {\n const childMock = createChildMock();\n dummy = new Dummy({\n children: [ childMock ],\n });\n expect(childMock.appendTo).toBeCalledWith(dummy);\n });\n });\n describe('.children', () => {\n it('should return empty array by default', () => {", "score": 0.8725297451019287 } ]
typescript
childMock.format).toBeCalledWith(anotherTool, start, end, undefined);
import { FormattingNodeConstructorParameters, InlineToolName, InlineToolData } from './types'; import { ChildNode, InlineFragment, InlineNode, InlineNodeSerialized, ParentNode } from '../interfaces'; export * from './types'; /** * We need to extend FormattingNode interface with ChildNode and ParentNode ones to use the methods from mixins */ export interface FormattingNode extends ChildNode, ParentNode {} /** * FormattingNode class represents a node in a tree-like structure, used to store and manipulate formatted text content */ @ParentNode @ChildNode export class FormattingNode implements InlineNode { /** * Private field representing the name of the formatting tool applied to the content */ #tool: InlineToolName; /** * Any additional data associated with the formatting tool */ #data?: InlineToolData; /** * Constructor for FormattingNode class. * * @param args - FormattingNode constructor arguments. * @param args.tool - The name of the formatting tool applied to the content. * @param args.data - Any additional data associated with the formatting. */ constructor({ tool, data }: FormattingNodeConstructorParameters) { this.#tool = tool; this.#data = data; } /** * Returns text value length of current node (including subtree) */ public get length(): number { return this.children.reduce((
sum, child) => sum + child.length, 0);
} /** * Returns serialized value of the node: text and formatting fragments */ public get serialized(): InlineNodeSerialized { return { text: this.getText(), fragments: this.getFragments(), }; } /** * Inserts text to the specified index, by default appends text to the end of the current value * * @param text - text to insert * @param [index] - char index where to insert text */ public insertText(text: string, index = this.length): void { const [child, offset] = this.#findChildByIndex(index); child?.insertText(text, index - offset); } /** * Removes text form the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value * @returns {string} removed text */ public removeText(start = 0, end = this.length): string { const result = this.#reduceChildrenInRange( start, end, (acc, child, childStart, childEnd) => { return acc + child.removeText(childStart, childEnd); }, '' ); if (this.length === 0) { this.remove(); } return result; } /** * Returns text from the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value */ public getText(start = 0, end = this.length): string { return this.#reduceChildrenInRange( start, end, (acc, child, childStart, childEnd) => { return acc + child.getText(childStart, childEnd); }, '' ); } /** * Returns inline fragments for subtree including current node from the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value */ public getFragments(start = 0, end = this.length): InlineFragment[] { return this.#reduceChildrenInRange<InlineFragment[]>( start, end, (acc, child, childStart, childEnd) => { /** * If child is not a FormattingNode, it doesn't include any fragments. So we skip it. 
*/ if (!(child instanceof FormattingNode)) { return acc; } acc.push(...child.getFragments(childStart, childEnd)); return acc; }, [ { tool: this.#tool, data: this.#data, range: [start, end], } ] ); } /** * Splits current node by the specified index * * @param index - char index where to split the node * @returns {FormattingNode | null} new node */ public split(index: number): FormattingNode | null { if (index === 0 || index === this.length) { return null; } const newNode = new FormattingNode({ tool: this.#tool, data: this.#data, }); const [child, offset] = this.#findChildByIndex(index); if (!child) { return null; } // Have to save length as it is changed after split const childLength = child.length; const splitNode = child.split(index - offset); let midNodeIndex = this.children.indexOf(child); /** * If node is split or if node is not split but index equals to child length, we should split children from the next node */ if (splitNode || (index - offset === childLength)) { midNodeIndex += 1; } newNode.append(...this.children.slice(midNodeIndex)); this.parent?.insertAfter(this, newNode); return newNode; } /** * Applies formatting to the text with specified inline tool in the specified range * * @param tool - name of inline tool to apply * @param start - char start index of the range * @param end - char end index of the range * @param [data] - inline tool data if applicable */ public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] { /** * In case current tool is the same as new one, do nothing */ if (tool === this.#tool) { return []; } return this.#reduceChildrenInRange<InlineNode[]>( start, end, (acc, child, childStart, childEnd) => { acc.push(...child.format(tool, childStart, childEnd, data)); return acc; }, [] ); } /** * Iterates through children in range and calls callback for each * * @param start - range start char index * @param end - range end char index * @param callback - callback to apply on children * @param initialValue - initial accumulator value * @private */ #reduceChildrenInRange<Acc>( start: number, end: number, callback: (acc: Acc, child: InlineNode, start: number, end: number) => Acc, initialValue: Acc ): Acc { let result = initialValue; for (const child of this.children) { if (start < child.length && end > 0 && start < end) { result = callback(result, child, Math.max(start, 0), Math.min(child.length, end)); } start -= child.length; end -= child.length; } return result; } /** * Returns child by passed text index * * @param index - char index * @private */ #findChildByIndex(index: number): [child: InlineNode & ChildNode | null, offset: number] { let totalLength = 0; for (const child of this.children) { if (index <= child.length + totalLength) { return [child, totalLength]; } totalLength += child.length; } return [null, totalLength]; } }
src/entities/FormattingNode/index.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/TextNode/index.ts", "retrieved_chunk": " constructor({ value = '' }: TextNodeConstructorParameters = {}) {\n this.#value = value;\n }\n /**\n * Returns length of the text\n */\n public get length(): number {\n return this.#value.length;\n }\n /**", "score": 0.919564962387085 }, { "filename": "src/entities/EditorDocument/index.ts", "retrieved_chunk": " * @param args.properties - The properties of the document.\n */\n constructor({ children, properties }: EditorDocumentConstructorParameters) {\n this.#children = children;\n this.#properties = properties;\n }\n /**\n * Returns count of child BlockNodes of the EditorDocument.\n */\n public get length(): number {", "score": 0.8767206072807312 }, { "filename": "src/entities/interfaces/InlineNode.ts", "retrieved_chunk": "import { InlineToolData, InlineToolName } from '../FormattingNode';\n/**\n * Interface describing abstract InlineNode\n */\nexport interface InlineNode {\n /**\n * Text length of node and it's subtree\n */\n length: number;\n /**", "score": 0.8765383362770081 }, { "filename": "src/entities/interfaces/ParentNode.ts", "retrieved_chunk": " this.#children = children;\n this.children.forEach(child => child.appendTo(this));\n }\n /**\n * Returns node's children\n */\n public get children(): ChildNode[] {\n return this.#children;\n }\n /**", "score": 0.8614046573638916 }, { "filename": "src/entities/TextNode/index.ts", "retrieved_chunk": " * Returns serialized value of the node\n */\n public get serialized(): InlineNodeSerialized {\n return {\n text: this.getText(),\n // No fragments for text node\n fragments: [],\n };\n }\n /**", "score": 0.8581295013427734 } ]
typescript
sum, child) => sum + child.length, 0);
import { FormattingNodeConstructorParameters, InlineToolName, InlineToolData } from './types'; import { ChildNode, InlineFragment, InlineNode, InlineNodeSerialized, ParentNode } from '../interfaces'; export * from './types'; /** * We need to extend FormattingNode interface with ChildNode and ParentNode ones to use the methods from mixins */ export interface FormattingNode extends ChildNode, ParentNode {} /** * FormattingNode class represents a node in a tree-like structure, used to store and manipulate formatted text content */ @ParentNode @ChildNode export class FormattingNode implements InlineNode { /** * Private field representing the name of the formatting tool applied to the content */ #tool: InlineToolName; /** * Any additional data associated with the formatting tool */ #data?: InlineToolData; /** * Constructor for FormattingNode class. * * @param args - FormattingNode constructor arguments. * @param args.tool - The name of the formatting tool applied to the content. * @param args.data - Any additional data associated with the formatting. */ constructor({ tool, data }: FormattingNodeConstructorParameters) { this.#tool = tool; this.#data = data; } /** * Returns text value length of current node (including subtree) */ public get length(): number { return this.children.reduce
((sum, child) => sum + child.length, 0);
} /** * Returns serialized value of the node: text and formatting fragments */ public get serialized(): InlineNodeSerialized { return { text: this.getText(), fragments: this.getFragments(), }; } /** * Inserts text to the specified index, by default appends text to the end of the current value * * @param text - text to insert * @param [index] - char index where to insert text */ public insertText(text: string, index = this.length): void { const [child, offset] = this.#findChildByIndex(index); child?.insertText(text, index - offset); } /** * Removes text form the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value * @returns {string} removed text */ public removeText(start = 0, end = this.length): string { const result = this.#reduceChildrenInRange( start, end, (acc, child, childStart, childEnd) => { return acc + child.removeText(childStart, childEnd); }, '' ); if (this.length === 0) { this.remove(); } return result; } /** * Returns text from the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value */ public getText(start = 0, end = this.length): string { return this.#reduceChildrenInRange( start, end, (acc, child, childStart, childEnd) => { return acc + child.getText(childStart, childEnd); }, '' ); } /** * Returns inline fragments for subtree including current node from the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value */ public getFragments(start = 0, end = this.length): InlineFragment[] { return this.#reduceChildrenInRange<InlineFragment[]>( start, end, (acc, child, childStart, childEnd) => { /** * If child is not a FormattingNode, it doesn't include any fragments. So we skip it. 
*/ if (!(child instanceof FormattingNode)) { return acc; } acc.push(...child.getFragments(childStart, childEnd)); return acc; }, [ { tool: this.#tool, data: this.#data, range: [start, end], } ] ); } /** * Splits current node by the specified index * * @param index - char index where to split the node * @returns {FormattingNode | null} new node */ public split(index: number): FormattingNode | null { if (index === 0 || index === this.length) { return null; } const newNode = new FormattingNode({ tool: this.#tool, data: this.#data, }); const [child, offset] = this.#findChildByIndex(index); if (!child) { return null; } // Have to save length as it is changed after split const childLength = child.length; const splitNode = child.split(index - offset); let midNodeIndex = this.children.indexOf(child); /** * If node is split or if node is not split but index equals to child length, we should split children from the next node */ if (splitNode || (index - offset === childLength)) { midNodeIndex += 1; } newNode.append(...this.children.slice(midNodeIndex)); this.parent?.insertAfter(this, newNode); return newNode; } /** * Applies formatting to the text with specified inline tool in the specified range * * @param tool - name of inline tool to apply * @param start - char start index of the range * @param end - char end index of the range * @param [data] - inline tool data if applicable */ public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] { /** * In case current tool is the same as new one, do nothing */ if (tool === this.#tool) { return []; } return this.#reduceChildrenInRange<InlineNode[]>( start, end, (acc, child, childStart, childEnd) => { acc.push(...child.format(tool, childStart, childEnd, data)); return acc; }, [] ); } /** * Iterates through children in range and calls callback for each * * @param start - range start char index * @param end - range end char index * @param callback - callback to apply on children * @param initialValue - initial accumulator value * @private */ #reduceChildrenInRange<Acc>( start: number, end: number, callback: (acc: Acc, child: InlineNode, start: number, end: number) => Acc, initialValue: Acc ): Acc { let result = initialValue; for (const child of this.children) { if (start < child.length && end > 0 && start < end) { result = callback(result, child, Math.max(start, 0), Math.min(child.length, end)); } start -= child.length; end -= child.length; } return result; } /** * Returns child by passed text index * * @param index - char index * @private */ #findChildByIndex(index: number): [child: InlineNode & ChildNode | null, offset: number] { let totalLength = 0; for (const child of this.children) { if (index <= child.length + totalLength) { return [child, totalLength]; } totalLength += child.length; } return [null, totalLength]; } }
src/entities/FormattingNode/index.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/TextNode/index.ts", "retrieved_chunk": " constructor({ value = '' }: TextNodeConstructorParameters = {}) {\n this.#value = value;\n }\n /**\n * Returns length of the text\n */\n public get length(): number {\n return this.#value.length;\n }\n /**", "score": 0.9164409637451172 }, { "filename": "src/entities/EditorDocument/index.ts", "retrieved_chunk": " * @param args.properties - The properties of the document.\n */\n constructor({ children, properties }: EditorDocumentConstructorParameters) {\n this.#children = children;\n this.#properties = properties;\n }\n /**\n * Returns count of child BlockNodes of the EditorDocument.\n */\n public get length(): number {", "score": 0.8718640804290771 }, { "filename": "src/entities/interfaces/InlineNode.ts", "retrieved_chunk": "import { InlineToolData, InlineToolName } from '../FormattingNode';\n/**\n * Interface describing abstract InlineNode\n */\nexport interface InlineNode {\n /**\n * Text length of node and it's subtree\n */\n length: number;\n /**", "score": 0.8710030317306519 }, { "filename": "src/entities/interfaces/ParentNode.ts", "retrieved_chunk": " this.#children = children;\n this.children.forEach(child => child.appendTo(this));\n }\n /**\n * Returns node's children\n */\n public get children(): ChildNode[] {\n return this.#children;\n }\n /**", "score": 0.8589815497398376 }, { "filename": "src/entities/TextNode/index.ts", "retrieved_chunk": " public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {\n this.#validateIndex(start);\n this.#validateIndex(end);\n const formattingNode = new FormattingNode({\n tool,\n data,\n });\n const fragments: ChildNode[] = [];\n /**\n * If start index is greater than 0, we need extract part of the text before the start index", "score": 0.8507667183876038 } ]
typescript
((sum, child) => sum + child.length, 0);
import { describe, it, expect, beforeEach } from '@jest/globals'; import { TextNode } from './index'; import { createInlineToolName, FormattingNode } from '../FormattingNode'; import type { ParentNode } from '../interfaces'; describe('TextNode', () => { const initialText = 'initial text'; const text = 'some text'; const parentMock = { insertAfter: jest.fn(), removeChild: jest.fn(), append: jest.fn(), children: [], } as unknown as ParentNode; let node: TextNode; beforeEach(() => { node = new TextNode({ value: initialText, parent: parentMock as FormattingNode, }); }); it('should have empty value by default', () => { node = new TextNode(); expect(node.getText()).toEqual(''); }); describe('.insertText()', () => { it('should set text to value if node is empty', () => { node = new TextNode(); node.insertText(text); expect(node.getText()).toEqual(text); }); it('should append text if not empty', () => { node.insertText(text); expect(node.getText()).toEqual(initialText + text); }); it('should prepend text if index is 0 and node is not empty', () => { node.insertText(text, 0); expect(node.getText()).toEqual(text + initialText); }); it('should insert text at index if not empty', () => { const index = 5; node.insertText(text, index); expect(node.getText()).toEqual(initialText.slice(0, index) + text + initialText.slice(index)); }); it('should throw an error if index is less than 0', () => { const f = (): void => node.insertText(text, -1); expect(f).toThrowError(); }); it('should throw an error if index is greater than node length', () => { const f = (): void => node.insertText(text, initialText.length + 1); expect(f).toThrowError(); }); }); describe('.getText()', () => { it('should return sliced value if start provided', () => { const start = 5; expect(node.getText(start)).toEqual(initialText.slice(start)); }); it('should return sliced value if end provided', () => { const end = 6; expect(node.getText(0, end)).toEqual(initialText.slice(0, end)); }); it('should return sliced value if full range provided', () => { const start = 3; const end = 9; expect(node.getText(start, end)).toEqual(initialText.slice(start, end)); }); it('should throw an error if start is invalid index', () => { expect(() => node.getText(-1)).toThrowError(); expect(() => node.getText(initialText.length + 1)).toThrowError(); }); it('should throw an error if end is invalid index', () => { expect(() => node.getText(0, initialText.length + 1)).toThrowError(); }); it('should throw an error if end index is greater than start index', () => { const start = 5; const end = 3; expect(() => node.getText(start, end)).toThrowError(); }); it('should not throw an error if end index is equal to start index', () => { const start = 5; const end = 5; expect(() => node.getText(start, end)).not.toThrowError(); }); }); describe('.removeText()', () => { it('should remove all text by default', () => { node.removeText(); expect(node.getText()).toEqual(''); }); it('should remove text from specified index', () => { const start = 3; node.removeText(start); expect(node.getText()).toEqual(initialText.slice(0, start)); }); it('should remove text from 0 to specified end index', () => { const end = 8; node.removeText(0, end); expect(node.getText()).toEqual(initialText.slice(end)); }); it('should remove text from specified start and end indecies', () => { const start = 3; const end = 8; node.removeText(start, end); expect(node.getText()).toEqual(initialText.slice(0, start) + initialText.slice(end)); }); it('should call remove() method if node is empty after 
removeText() call', () => { jest.spyOn(node, 'remove'); node.removeText(); expect(node.remove).toBeCalled(); }); }); describe('.format()', () => { it('should return just one FormattingNode, if formatting full TextNode', () => { const name = createInlineToolName('bold'); const fragments = node.format(name, 0, initialText.length); expect(fragments).toHaveLength(1); expect(fragments[0]).toBeInstanceOf(FormattingNode); }); it('should return two fragments if formatting from the start, but not to the end', () => { const name = createInlineToolName('bold'); const end = 5; const fragments = node.format(name, 0, end); expect(fragments).toHaveLength(2); expect(fragments[0]).toBeInstanceOf(FormattingNode); expect(fragments[1]).toBeInstanceOf(TextNode); }); it('should return two fragments if formatting to the end, but not from the start', () => { const name = createInlineToolName('bold'); const start = 5; const fragments = node.format(name, start, initialText.length); expect(fragments).toHaveLength(2); expect(fragments[0]).toBeInstanceOf(TextNode); expect(fragments[1]).toBeInstanceOf(FormattingNode); }); it('should return three fragments if formatting in the middle', () => { const name = createInlineToolName('bold'); const start = 5; const end = 8; const fragments = node.format(name, start, end); // eslint-disable-next-line @typescript-eslint/no-magic-numbers expect(fragments).toHaveLength(3); expect(fragments[0]).toBeInstanceOf(TextNode); expect(fragments[1]).toBeInstanceOf(FormattingNode); expect(fragments[2]).toBeInstanceOf(TextNode); }); it('should return FormattingNode with a TextNode as a child with correct text value', () => { const name = createInlineToolName('bold'); const start = 5; const end = 8; const fragments = node.format(name, start, end); const formattingNode = fragments[1] as FormattingNode; expect(
formattingNode.children[0].getText()).toEqual(initialText.slice(start, end));
}); it('should call parent\'s insertAfter with new nodes', () => { const name = createInlineToolName('bold'); const start = 5; const end = 8; const fragments = node.format(name, start, end); expect(parentMock.insertAfter).toBeCalledWith(node, ...fragments); }); }); describe('.split()', () => { const index = 5; it('should not split (return null) if index is 0', () => { const newNode = node.split(0); expect(newNode).toBeNull(); }); it('should not split (return null) if index equals text length', () => { const newNode = node.split(initialText.length); expect(newNode).toBeNull(); }); it('should create new TextNode on split', () => { const newNode = node.split(index); expect(newNode).toBeInstanceOf(TextNode); }); it('should create new TextNode with text value splitted from the original one', () => { const newNode = node.split(index); expect(newNode?.getText()).toEqual(initialText.slice(index)); }); it('should remove split text value from the original node', () => { node.split(index); expect(node.getText()).toEqual(initialText.slice(0, index)); }); it('should insert new node to the parent', () => { const newNode = node.split(index); expect(parentMock.insertAfter).toBeCalledWith(node, newNode); }); }); describe('.serialized', () => { it('should return text value and empty array of fragments', () => { const result = node.serialized; expect(result).toEqual({ text: initialText, fragments: [], }); }); }); });
src/entities/TextNode/TextNode.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " const newNode = node.split(index);\n expect(parentMock.insertAfter).toBeCalledWith(node, newNode);\n });\n });\n describe('.format()', () => {\n const start = 3;\n const end = 5;\n it('should apply formatting to the relevant child', () => {\n node.format(anotherTool, start, end);\n expect(childMock.format).toBeCalledWith(anotherTool, start, end, undefined);", "score": 0.8777447938919067 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " it('should not split (return null) if index equals text length', () => {\n const newNode = node.split(node.length);\n expect(newNode).toBeNull();\n });\n it('should create new FormattingNode on split', () => {\n const newNode = node.split(index);\n expect(newNode).toBeInstanceOf(FormattingNode);\n });\n /**\n * @todo check this and related cases with integration tests", "score": 0.8698276281356812 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " });\n it('should return array of new formatting nodes', () => {\n const result = node.format(anotherTool, start, end);\n expect(result).toEqual(childMock.format(anotherTool, start, end));\n });\n });\n});", "score": 0.8631448149681091 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " describe('.serialized', () => {\n it('should return concatenated text of all fragments with fragments list describing formatting', () => {\n const result = node.serialized;\n expect(result).toEqual({\n text: childMock.getText() + anotherChildMock.getText(),\n fragments: [\n {\n tool,\n data,\n range: [0, node.length],", "score": 0.8598257303237915 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " removeSpy.mockRestore();\n lengthSpy.mockRestore();\n });\n });\n describe('.getText()', () => {\n const start = 3;\n const end = 5;\n it('should call getText() for the relevant child', () => {\n node.getText(start, end);\n expect(childMock.getText).toBeCalledWith(start, end);", "score": 0.8562235832214355 } ]
typescript
formattingNode.children[0].getText()).toEqual(initialText.slice(start, end));
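Editorial note (not one of the dataset rows): the TextNode spec in this row counts one, two, or three resulting fragments depending on whether the formatted range touches the start and end of the value. As a rough illustration of that slicing (a hypothetical helper, not the project's TextNode.format, which additionally wraps the middle piece in a FormattingNode):

// Splits a value into the up-to-three pieces the spec counts: plain-before, formatted, plain-after.
type PieceSketch = { text: string; formatted: boolean };

function sliceForFormattingSketch(value: string, start: number, end: number): PieceSketch[] {
  const pieces: PieceSketch[] = [];
  if (start > 0) pieces.push({ text: value.slice(0, start), formatted: false });
  pieces.push({ text: value.slice(start, end), formatted: true });
  if (end < value.length) pieces.push({ text: value.slice(end), formatted: false });
  return pieces;
}

// sliceForFormattingSketch('initial text', 5, 8) // -> 'initi' | 'al ' | 'text' (three pieces)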
import { describe, it, expect, beforeEach } from '@jest/globals'; import { TextNode } from './index'; import { createInlineToolName, FormattingNode } from '../FormattingNode'; import type { ParentNode } from '../interfaces'; describe('TextNode', () => { const initialText = 'initial text'; const text = 'some text'; const parentMock = { insertAfter: jest.fn(), removeChild: jest.fn(), append: jest.fn(), children: [], } as unknown as ParentNode; let node: TextNode; beforeEach(() => { node = new TextNode({ value: initialText, parent: parentMock as FormattingNode, }); }); it('should have empty value by default', () => { node = new TextNode(); expect(node.getText()).toEqual(''); }); describe('.insertText()', () => { it('should set text to value if node is empty', () => { node = new TextNode(); node.insertText(text); expect(node.getText()).toEqual(text); }); it('should append text if not empty', () => { node.insertText(text); expect(node.getText()).toEqual(initialText + text); }); it('should prepend text if index is 0 and node is not empty', () => { node.insertText(text, 0); expect(node.getText()).toEqual(text + initialText); }); it('should insert text at index if not empty', () => { const index = 5; node.insertText(text, index); expect(node.getText()).toEqual(initialText.slice(0, index) + text + initialText.slice(index)); }); it('should throw an error if index is less than 0', () => { const f = (): void => node.insertText(text, -1); expect(f).toThrowError(); }); it('should throw an error if index is greater than node length', () => { const f = (): void => node.insertText(text, initialText.length + 1); expect(f).toThrowError(); }); }); describe('.getText()', () => { it('should return sliced value if start provided', () => { const start = 5; expect(node.getText(start)).toEqual(initialText.slice(start)); }); it('should return sliced value if end provided', () => { const end = 6; expect(node.getText(0, end)).toEqual(initialText.slice(0, end)); }); it('should return sliced value if full range provided', () => { const start = 3; const end = 9; expect(node.getText(start, end)).toEqual(initialText.slice(start, end)); }); it('should throw an error if start is invalid index', () => { expect(() => node.getText(-1)).toThrowError(); expect(() => node.getText(initialText.length + 1)).toThrowError(); }); it('should throw an error if end is invalid index', () => { expect(() => node.getText(0, initialText.length + 1)).toThrowError(); }); it('should throw an error if end index is greater than start index', () => { const start = 5; const end = 3; expect(() => node.getText(start, end)).toThrowError(); }); it('should not throw an error if end index is equal to start index', () => { const start = 5; const end = 5; expect(() => node.getText(start, end)).not.toThrowError(); }); }); describe('.removeText()', () => { it('should remove all text by default', () => { node.removeText(); expect(node.getText()).toEqual(''); }); it('should remove text from specified index', () => { const start = 3; node.removeText(start); expect(node.getText()).toEqual(initialText.slice(0, start)); }); it('should remove text from 0 to specified end index', () => { const end = 8; node.removeText(0, end); expect(node.getText()).toEqual(initialText.slice(end)); }); it('should remove text from specified start and end indecies', () => { const start = 3; const end = 8; node.removeText(start, end); expect(node.getText()).toEqual(initialText.slice(0, start) + initialText.slice(end)); }); it('should call remove() method if node is empty after 
removeText() call', () => { jest.spyOn(node, 'remove'); node.removeText();
expect(node.remove).toBeCalled();
}); }); describe('.format()', () => { it('should return just one FormattingNode, if formatting full TextNode', () => { const name = createInlineToolName('bold'); const fragments = node.format(name, 0, initialText.length); expect(fragments).toHaveLength(1); expect(fragments[0]).toBeInstanceOf(FormattingNode); }); it('should return two fragments if formatting from the start, but not to the end', () => { const name = createInlineToolName('bold'); const end = 5; const fragments = node.format(name, 0, end); expect(fragments).toHaveLength(2); expect(fragments[0]).toBeInstanceOf(FormattingNode); expect(fragments[1]).toBeInstanceOf(TextNode); }); it('should return two fragments if formatting to the end, but not from the start', () => { const name = createInlineToolName('bold'); const start = 5; const fragments = node.format(name, start, initialText.length); expect(fragments).toHaveLength(2); expect(fragments[0]).toBeInstanceOf(TextNode); expect(fragments[1]).toBeInstanceOf(FormattingNode); }); it('should return three fragments if formatting in the middle', () => { const name = createInlineToolName('bold'); const start = 5; const end = 8; const fragments = node.format(name, start, end); // eslint-disable-next-line @typescript-eslint/no-magic-numbers expect(fragments).toHaveLength(3); expect(fragments[0]).toBeInstanceOf(TextNode); expect(fragments[1]).toBeInstanceOf(FormattingNode); expect(fragments[2]).toBeInstanceOf(TextNode); }); it('should return FormattingNode with a TextNode as a child with correct text value', () => { const name = createInlineToolName('bold'); const start = 5; const end = 8; const fragments = node.format(name, start, end); const formattingNode = fragments[1] as FormattingNode; expect(formattingNode.children[0].getText()).toEqual(initialText.slice(start, end)); }); it('should call parent\'s insertAfter with new nodes', () => { const name = createInlineToolName('bold'); const start = 5; const end = 8; const fragments = node.format(name, start, end); expect(parentMock.insertAfter).toBeCalledWith(node, ...fragments); }); }); describe('.split()', () => { const index = 5; it('should not split (return null) if index is 0', () => { const newNode = node.split(0); expect(newNode).toBeNull(); }); it('should not split (return null) if index equals text length', () => { const newNode = node.split(initialText.length); expect(newNode).toBeNull(); }); it('should create new TextNode on split', () => { const newNode = node.split(index); expect(newNode).toBeInstanceOf(TextNode); }); it('should create new TextNode with text value splitted from the original one', () => { const newNode = node.split(index); expect(newNode?.getText()).toEqual(initialText.slice(index)); }); it('should remove split text value from the original node', () => { node.split(index); expect(node.getText()).toEqual(initialText.slice(0, index)); }); it('should insert new node to the parent', () => { const newNode = node.split(index); expect(parentMock.insertAfter).toBeCalledWith(node, newNode); }); }); describe('.serialized', () => { it('should return text value and empty array of fragments', () => { const result = node.serialized; expect(result).toEqual({ text: initialText, fragments: [], }); }); }); });
src/entities/TextNode/TextNode.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " });\n });\n describe('.removeText()', () => {\n const start = 3;\n const end = 5;\n it('should remove text from appropriate child', () => {\n node.removeText(start, end);\n expect(childMock.removeText).toBeCalledWith(start, end);\n });\n it('should adjust indices by child offset', () => {", "score": 0.9086815714836121 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " removeSpy.mockRestore();\n lengthSpy.mockRestore();\n });\n });\n describe('.getText()', () => {\n const start = 3;\n const end = 5;\n it('should call getText() for the relevant child', () => {\n node.getText(start, end);\n expect(childMock.getText).toBeCalledWith(start, end);", "score": 0.8950191736221313 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " it('should remove all text by default', () => {\n node.removeText();\n expect(childMock.removeText).toBeCalledWith(0, childMock.length);\n expect(anotherChildMock.removeText).toBeCalledWith(0, anotherChildMock.length);\n });\n it('should call remove() if length is 0 after removeText() call', () => {\n const removeSpy = jest.spyOn(node, 'remove');\n const lengthSpy = jest.spyOn(node, 'length', 'get').mockImplementation(() => 0);\n node.removeText();\n expect(removeSpy).toBeCalled();", "score": 0.8668551445007324 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " const newNode = node.split(index);\n expect(parentMock.insertAfter).toBeCalledWith(node, newNode);\n });\n });\n describe('.format()', () => {\n const start = 3;\n const end = 5;\n it('should apply formatting to the relevant child', () => {\n node.format(anotherTool, start, end);\n expect(childMock.format).toBeCalledWith(anotherTool, start, end, undefined);", "score": 0.8617158532142639 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " });\n it('should adjust index by child offset', () => {\n const offset = childMock.length;\n node.getText(offset + start, offset + end);\n expect(anotherChildMock.getText).toBeCalledWith(start, end);\n });\n it('should call getText for all relevant children', () => {\n const offset = childMock.length;\n node.getText(start, offset + end);\n expect(childMock.getText).toBeCalledWith(start, offset);", "score": 0.8607826828956604 } ]
typescript
expect(node.remove).toBeCalled();
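A minimal usage sketch of the removeText() behaviour exercised by the spec above. The imports mirror the spec file; attaching the node to a FormattingNode parent is an illustrative assumption so that remove() has something to detach from.

import { TextNode } from './index';
import { createInlineToolName, FormattingNode } from '../FormattingNode';

// A parent is needed so that an emptied node can detach itself via remove()
const parent = new FormattingNode({ tool: createInlineToolName('bold') });
const node = new TextNode({ value: 'initial text', parent });

node.removeText(3, 8); // returns 'tial ', value is now 'initext'
node.removeText();     // returns the rest; length hits 0, so remove() fires
                       // and the parent drops the node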
import { FormattingNodeConstructorParameters, InlineToolName, InlineToolData } from './types'; import { ChildNode, InlineFragment, InlineNode, InlineNodeSerialized, ParentNode } from '../interfaces'; export * from './types'; /** * We need to extend FormattingNode interface with ChildNode and ParentNode ones to use the methods from mixins */ export interface FormattingNode extends ChildNode, ParentNode {} /** * FormattingNode class represents a node in a tree-like structure, used to store and manipulate formatted text content */ @ParentNode @ChildNode export class FormattingNode implements InlineNode { /** * Private field representing the name of the formatting tool applied to the content */ #tool: InlineToolName; /** * Any additional data associated with the formatting tool */ #data?: InlineToolData; /** * Constructor for FormattingNode class. * * @param args - FormattingNode constructor arguments. * @param args.tool - The name of the formatting tool applied to the content. * @param args.data - Any additional data associated with the formatting. */ constructor({ tool, data }: FormattingNodeConstructorParameters) { this.#tool = tool; this.#data = data; } /** * Returns text value length of current node (including subtree) */ public get length(): number { return this.children.reduce((sum, child) => sum + child.length, 0); } /** * Returns serialized value of the node: text and formatting fragments */ public get serialized(): InlineNodeSerialized { return { text: this.getText(), fragments: this.getFragments(), }; } /** * Inserts text to the specified index, by default appends text to the end of the current value * * @param text - text to insert * @param [index] - char index where to insert text */ public insertText(text: string, index = this.length): void { const [child, offset] = this.#findChildByIndex(index); child?.insertText(text, index - offset); } /** * Removes text form the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value * @returns {string} removed text */ public removeText(start = 0, end = this.length): string { const result = this.#reduceChildrenInRange( start, end, (acc, child, childStart, childEnd) => { return acc + child.removeText(childStart, childEnd); }, '' ); if (this.length === 0) { this.remove(); } return result; } /** * Returns text from the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value */ public getText(start = 0, end = this.length): string { return this.#reduceChildrenInRange( start, end, (acc, child, childStart, childEnd) => { return acc + child.getText(childStart, childEnd); }, '' ); } /** * Returns inline fragments for subtree including current node from the specified range * * @param [start] - start char index of the range, by default 0 * @param [end] - end char index of the range, by default length of the text value */ public getFragments(start = 0, end = this.length): InlineFragment[] { return this.#reduceChildrenInRange<InlineFragment[]>( start, end, (acc, child, childStart, childEnd) => { /** * If child is not a FormattingNode, it doesn't include any fragments. So we skip it. 
*/ if (!(child instanceof FormattingNode)) { return acc; } acc.push(...child.getFragments(childStart, childEnd)); return acc; }, [ { tool: this.#tool, data: this.#data, range: [start, end], } ] ); } /** * Splits current node by the specified index * * @param index - char index where to split the node * @returns {FormattingNode | null} new node */ public split(index: number): FormattingNode | null { if (index === 0 || index === this.length) { return null; } const newNode = new FormattingNode({ tool: this.#tool, data: this.#data, }); const [child, offset] = this.#findChildByIndex(index); if (!child) { return null; } // Have to save length as it is changed after split const childLength = child.length; const splitNode = child.split(index - offset); let midNodeIndex = this.children.indexOf(child); /** * If node is split or if node is not split but index equals to child length, we should split children from the next node */ if (splitNode || (index - offset === childLength)) { midNodeIndex += 1; } newNode
.append(...this.children.slice(midNodeIndex));
this.parent?.insertAfter(this, newNode); return newNode; } /** * Applies formatting to the text with specified inline tool in the specified range * * @param tool - name of inline tool to apply * @param start - char start index of the range * @param end - char end index of the range * @param [data] - inline tool data if applicable */ public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] { /** * In case current tool is the same as new one, do nothing */ if (tool === this.#tool) { return []; } return this.#reduceChildrenInRange<InlineNode[]>( start, end, (acc, child, childStart, childEnd) => { acc.push(...child.format(tool, childStart, childEnd, data)); return acc; }, [] ); } /** * Iterates through children in range and calls callback for each * * @param start - range start char index * @param end - range end char index * @param callback - callback to apply on children * @param initialValue - initial accumulator value * @private */ #reduceChildrenInRange<Acc>( start: number, end: number, callback: (acc: Acc, child: InlineNode, start: number, end: number) => Acc, initialValue: Acc ): Acc { let result = initialValue; for (const child of this.children) { if (start < child.length && end > 0 && start < end) { result = callback(result, child, Math.max(start, 0), Math.min(child.length, end)); } start -= child.length; end -= child.length; } return result; } /** * Returns child by passed text index * * @param index - char index * @private */ #findChildByIndex(index: number): [child: InlineNode & ChildNode | null, offset: number] { let totalLength = 0; for (const child of this.children) { if (index <= child.length + totalLength) { return [child, totalLength]; } totalLength += child.length; } return [null, totalLength]; } }
src/entities/FormattingNode/index.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/interfaces/ParentNode.ts", "retrieved_chunk": " * Appends passed children to this node\n *\n * @param children - array of children to append\n */\n public append(...children: ChildNode[]): void {\n /**\n * If node is already a child of current node, remove it to append at the end\n */\n children.forEach(child => {\n const index = this.children.indexOf(child);", "score": 0.8546491861343384 }, { "filename": "src/entities/interfaces/ParentNode.ts", "retrieved_chunk": " *\n * @param target - target after which to insert new children\n * @param children - children nodes to insert\n */\n public insertAfter(target: ChildNode, ...children: ChildNode[]): void {\n /**\n * If node is already a child of current node, remove it to append at the end\n */\n children.forEach(child => {\n const index = this.children.indexOf(child);", "score": 0.8512765169143677 }, { "filename": "src/entities/interfaces/ParentNode.ts", "retrieved_chunk": " *\n * @param child - child to remove\n */\n public removeChild(child: ChildNode): void {\n const index = this.children.indexOf(child);\n this.children.splice(index, 1);\n child.remove();\n }\n /**\n * Inserts new children after specified target", "score": 0.8344660401344299 }, { "filename": "src/entities/TextNode/index.ts", "retrieved_chunk": " /**\n * Splits current node into two nodes by the specified index\n *\n * @param index - char index where to split\n * @returns {TextNode|null} - new node or null if split is not applicable\n */\n public split(index: number): TextNode | null {\n if (index === 0 || index === this.length) {\n return null;\n }", "score": 0.8260561227798462 }, { "filename": "src/entities/TextNode/index.ts", "retrieved_chunk": " const newNode = new TextNode();\n const text = this.removeText(index);\n newNode.insertText(text);\n this.parent?.insertAfter(this, newNode);\n return newNode;\n }\n /**\n * Validates index\n *\n * @param index - char index to validate", "score": 0.8112438917160034 } ]
typescript
.append(...this.children.slice(midNodeIndex));
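A minimal sketch of how the FormattingNode above composes text from its children, written as if it lived alongside the spec files. The import paths and the use of append() from the ParentNode mixin are assumptions based on the sibling files shown in this dump.

import { FormattingNode, createInlineToolName } from '../FormattingNode';
import { TextNode } from '../TextNode';

const bold = new FormattingNode({ tool: createInlineToolName('bold') });

// append() comes from the ParentNode mixin; child lengths drive the offset math
bold.append(new TextNode({ value: 'Hello, ' }), new TextNode({ value: 'world' }));

// bold.length === 12 — sum of the children's lengths
const text = bold.getText(0, 5);       // 'Hello' — range resolved against the first child
const fragments = bold.getFragments(); // [{ tool: 'bold', data: undefined, range: [0, 12] }]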
import fs from 'fs/promises'; import inquirer from 'inquirer'; import pQueue from 'p-queue'; import { setTimeout as delay } from 'timers/promises'; import { Injectable, Logger } from '@nestjs/common'; import { Account as IAccount } from '../../interfaces/account.interface'; import { Secrets } from '../../interfaces/secrets.interface'; class Account implements IAccount { public readonly username: string; public readonly password: string; public sharedSecret: string | null = null; public identitySecret: string | null = null; constructor(account: string) { account = account.trim(); if (account.length === 0) throw new Error('Invalid account'); const parts = account.split(':').map((part) => part.trim()); if (parts.length < 2) throw new Error('Invalid account'); const [username, password, sharedSecret, identitySecret] = parts; this.username = username; this.password = password; if (sharedSecret) this.sharedSecret = sharedSecret; if (identitySecret) this.identitySecret = identitySecret; } } @Injectable() export class AccountsImportService { private readonly logger = new Logger(AccountsImportService.name); private readonly readFilesQueue = new pQueue({ concurrency: 100 }); public async loadAccounts(input: string[] | string) { if (!input) return []; if (!Array.isArray(input)) input = [input]; if (input.length === 0) return []; let accounts: Account[] = []; const errors: string[] = []; const readResults = await Promise.all(input.map((input) => this.readAccountsFromInput(input))); for (const result of readResults) { accounts.push(...result.values); errors.push(...result.errors); } accounts = this.removeDuplicates(accounts); if (errors.length > 0 && accounts.length > 0) { this.logger.warn(`The following account sources are invalid:\n${errors.join('\n')}`); await delay(1000); const { confirm } = await inquirer.prompt({ type: 'confirm', name: 'confirm', message: 'Continue with the valid accounts?', default: false, }); if (!confirm) throw new Error('Aborted by user'); } return accounts; } public assignSecretsToAccounts(
accounts: Account[], secrets: Secrets[]) {
const secretsMap = new Map<string, Secrets>(); for (const secret of secrets) { secretsMap.set(secret.username, secret); // some existing steam-oriented apps are case-insensitive to usernames in secrets secretsMap.set(secret.username.toLowerCase(), secret); } for (const account of accounts) { let secret = secretsMap.get(account.username); if (!secret) secret = secretsMap.get(account.username.toLowerCase()); if (!secret) continue; account.sharedSecret = secret.sharedSecret; account.identitySecret = secret.identitySecret; } } private removeDuplicates(accounts: Account[]) { const map = new Map<string, Account>(); for (const account of accounts) map.set(account.username, account); return [...map.values()]; } private async readAccountsFromInput(input: string) { const inputType = await this.inferInputType(input); if (inputType === 'file') return this.readAccountsFromFile(input); if (inputType === 'string') return this.readAccountFromString(input); if (inputType === 'directory') return { values: [], errors: [input] }; } private async readAccountsFromFile(filePath: string) { const result: { values: Account[]; errors: string[] } = { values: [], errors: [] }; try { let content = await this.readFilesQueue.add(() => fs.readFile(filePath, 'utf-8')); content = content.trim(); if (content.length === 0) throw new Error('Empty file'); // session file if (filePath.endsWith('.steamsession')) { const readResults = this.readAccountFromSessionFile(content); result.values.push(...readResults.values); if (readResults.errors.length > 0) result.errors.push(filePath); return result; } // asf json if (filePath.endsWith('.json') && content.includes('"SteamLogin"')) { const readResults = this.readAccountFromAsfJson(content); result.values.push(...readResults.values); if (readResults.errors.length > 0) result.errors.push(filePath); return result; } // plain text if (content.includes(':')) { const lines = content .split(/\s+|\r?\n/) .map((l) => l.trim()) .filter((l) => l.length > 0); if (lines.length === 0) throw new Error('Empty file'); for (const line of lines) { const readResults = this.readAccountFromString(line); result.values.push(...readResults.values); if (readResults.errors.length > 0) result.errors.push(line); } return result; } throw new Error('Unsupported file format'); } catch (error) { result.errors.push(filePath); } return result; } private readAccountFromString(str: string) { const result: { values: Account[]; errors: string[] } = { values: [], errors: [] }; try { const account = new Account(str); result.values.push(account); } catch (error) { result.errors.push(str); } return result; } private readAccountFromAsfJson(fileContent: string) { const result: { values: Account[]; errors: string[] } = { values: [], errors: [] }; try { const { SteamLogin: username, SteamPassword: password } = JSON.parse(fileContent); if (!username) throw new Error('Invalid username'); if (!password) throw new Error('Invalid password'); const account = new Account(`${username}:${password}`); result.values.push(account); } catch (error) { result.errors.push(fileContent); } return result; } private readAccountFromSessionFile(fileContent: string) { const result: { values: Account[]; errors: string[] } = { values: [], errors: [] }; try { const { Username, Password, SharedSecret, IdentitySecret } = JSON.parse(fileContent); if (!Username) throw new Error('Invalid username'); if (!Password) throw new Error('Invalid password'); const account = new Account(`${Username}:${Password}:${SharedSecret || ''}:${IdentitySecret || ''}`); 
result.values.push(account); } catch (error) { result.errors.push(fileContent); } return result; } private async inferInputType(input: string) { if (typeof input !== 'string') throw new Error(`Invalid input type: '${typeof input}'`); try { const stats = await fs.stat(input); if (stats.isFile()) return 'file'; if (stats.isDirectory()) return 'directory'; } catch (error) { return 'string'; } } }
src/modules/accounts-import/accounts-import.service.ts
Sadzurami-steam-sessions-creator-97b7294
[ { "filename": "src/commands/create/create-sessions.command.ts", "retrieved_chunk": " description: 'Overwrite existing sessions.',\n defaultValue: false,\n })\n private parseOverwriteOption(val: string) {\n return new CliUtilityService().parseBoolean(val);\n }\n @Help('afterAll')\n private displayExamples() {\n return `\nExamples:", "score": 0.8389394283294678 }, { "filename": "src/commands/create/create-sessions.service.ts", "retrieved_chunk": " constructor(private readonly steamTokensService: SteamTokensService, private readonly configService: ConfigService) {}\n public onModuleInit() {\n this.schemaVersion = this.configService.getOrThrow<number>('session.schemaVersion');\n }\n public async createSession(account: Account) {\n try {\n // we need to wait at least 30 seconds between each refresh token creation\n // because steam has a limit of logins for one account once per 30 seconds\n // probably it's fair only for accounts with 2FA enabled\n const delayMs = 1000 * 31;", "score": 0.8297464847564697 }, { "filename": "src/modules/secrets-import/secrets-import.service.ts", "retrieved_chunk": " default: false,\n });\n if (!confirm) throw new Error('Aborted by user');\n }\n return secrets;\n }\n private removeDuplicates(secrets: Secrets[]) {\n const map = new Map<string, Secrets>();\n for (const secret of secrets) map.set(secret.username, secret);\n return [...map.values()];", "score": 0.8278566598892212 }, { "filename": "src/commands/create/create-sessions.command.ts", "retrieved_chunk": " private readonly secretsImportService: SecretsImportService,\n private readonly proxiesImportService: ProxiesImportService,\n private readonly proxiesService: ProxiesService,\n ) {\n super();\n }\n public async run(args: string[], options: CreateCommandOptions) {\n try {\n const accountsOptionInput = await this.normalizeInput(options.accounts);\n let accounts = await this.accountsImportService.loadAccounts(accountsOptionInput);", "score": 0.8239374160766602 }, { "filename": "src/commands/validate/validate-sessions.command.ts", "retrieved_chunk": " description: `Specify one or more sessions.\nSession can be specified as:\n- A file path to load session from.\n- A glob pattern to load sessions from multiple files.`,\n })\n private parseSessionsOption(val: string, accumulator: string[] = []) {\n accumulator.push(val);\n return accumulator;\n }\n @Help('afterAll')", "score": 0.8210427165031433 } ]
typescript
accounts: Account[], secrets: Secrets[]) {
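A hedged usage sketch for the import service above. In the real CLI the service is provided by Nest's dependency injection; constructing it directly, the literal file name, and the exact shape of the Secrets objects are illustrative assumptions.

import { AccountsImportService } from './accounts-import.service';
import { Secrets } from '../../interfaces/secrets.interface';

async function demo(): Promise<void> {
  const importer = new AccountsImportService();

  // Accounts can be given as 'username:password[:sharedSecret[:identitySecret]]'
  // strings, or as paths to plain-text / ASF json / .steamsession files.
  const accounts = await importer.loadAccounts([
    'alice:hunter2',
    './accounts.txt', // hypothetical file; the format is detected from its content
  ]);

  // Secrets loaded separately are matched by username (case-insensitively)
  // and copied onto the matching accounts.
  importer.assignSecretsToAccounts(accounts, [
    { username: 'alice', sharedSecret: 'shared==', identitySecret: 'identity==' } as Secrets,
  ]);
}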
import { describe, it, expect, beforeEach } from '@jest/globals'; import { TextNode } from './index'; import { createInlineToolName, FormattingNode } from '../FormattingNode'; import type { ParentNode } from '../interfaces'; describe('TextNode', () => { const initialText = 'initial text'; const text = 'some text'; const parentMock = { insertAfter: jest.fn(), removeChild: jest.fn(), append: jest.fn(), children: [], } as unknown as ParentNode; let node: TextNode; beforeEach(() => { node = new TextNode({ value: initialText, parent: parentMock as FormattingNode, }); }); it('should have empty value by default', () => { node = new TextNode(); expect(node.getText()).toEqual(''); }); describe('.insertText()', () => { it('should set text to value if node is empty', () => { node = new TextNode(); node.insertText(text); expect(node.getText()).toEqual(text); }); it('should append text if not empty', () => { node.insertText(text); expect(node.getText()).toEqual(initialText + text); }); it('should prepend text if index is 0 and node is not empty', () => { node.insertText(text, 0); expect(node.getText()).toEqual(text + initialText); }); it('should insert text at index if not empty', () => { const index = 5; node.insertText(text, index); expect(node.getText()).toEqual(initialText.slice(0, index) + text + initialText.slice(index)); }); it('should throw an error if index is less than 0', () => { const f = (): void => node.insertText(text, -1); expect(f).toThrowError(); }); it('should throw an error if index is greater than node length', () => { const f = (): void => node.insertText(text, initialText.length + 1); expect(f).toThrowError(); }); }); describe('.getText()', () => { it('should return sliced value if start provided', () => { const start = 5; expect(node.getText(start)).toEqual(initialText.slice(start)); }); it('should return sliced value if end provided', () => { const end = 6; expect(node.getText(0, end)).toEqual(initialText.slice(0, end)); }); it('should return sliced value if full range provided', () => { const start = 3; const end = 9; expect(node.getText(start, end)).toEqual(initialText.slice(start, end)); }); it('should throw an error if start is invalid index', () => { expect(() => node.getText(-1)).toThrowError(); expect(() => node.getText(initialText.length + 1)).toThrowError(); }); it('should throw an error if end is invalid index', () => { expect(() => node.getText(0, initialText.length + 1)).toThrowError(); }); it('should throw an error if end index is greater than start index', () => { const start = 5; const end = 3; expect(() => node.getText(start, end)).toThrowError(); }); it('should not throw an error if end index is equal to start index', () => { const start = 5; const end = 5; expect(() => node.getText(start, end)).not.toThrowError(); }); }); describe('.removeText()', () => { it('should remove all text by default', () => { node.removeText(); expect(node.getText()).toEqual(''); }); it('should remove text from specified index', () => { const start = 3; node.removeText(start); expect(node.getText()).toEqual(initialText.slice(0, start)); }); it('should remove text from 0 to specified end index', () => { const end = 8; node.removeText(0, end); expect(node.getText()).toEqual(initialText.slice(end)); }); it('should remove text from specified start and end indecies', () => { const start = 3; const end = 8; node.removeText(start, end); expect(node.getText()).toEqual(initialText.slice(0, start) + initialText.slice(end)); }); it('should call remove() method if node is empty after 
removeText() call', () => { jest.spyOn(node, 'remove'); node.removeText(); expect(node.remove).toBeCalled(); }); }); describe('.format()', () => { it('should return just one FormattingNode, if formatting full TextNode', () => { const name = createInlineToolName('bold'); const
fragments = node.format(name, 0, initialText.length);
expect(fragments).toHaveLength(1); expect(fragments[0]).toBeInstanceOf(FormattingNode); }); it('should return two fragments if formatting from the start, but not to the end', () => { const name = createInlineToolName('bold'); const end = 5; const fragments = node.format(name, 0, end); expect(fragments).toHaveLength(2); expect(fragments[0]).toBeInstanceOf(FormattingNode); expect(fragments[1]).toBeInstanceOf(TextNode); }); it('should return two fragments if formatting to the end, but not from the start', () => { const name = createInlineToolName('bold'); const start = 5; const fragments = node.format(name, start, initialText.length); expect(fragments).toHaveLength(2); expect(fragments[0]).toBeInstanceOf(TextNode); expect(fragments[1]).toBeInstanceOf(FormattingNode); }); it('should return three fragments if formatting in the middle', () => { const name = createInlineToolName('bold'); const start = 5; const end = 8; const fragments = node.format(name, start, end); // eslint-disable-next-line @typescript-eslint/no-magic-numbers expect(fragments).toHaveLength(3); expect(fragments[0]).toBeInstanceOf(TextNode); expect(fragments[1]).toBeInstanceOf(FormattingNode); expect(fragments[2]).toBeInstanceOf(TextNode); }); it('should return FormattingNode with a TextNode as a child with correct text value', () => { const name = createInlineToolName('bold'); const start = 5; const end = 8; const fragments = node.format(name, start, end); const formattingNode = fragments[1] as FormattingNode; expect(formattingNode.children[0].getText()).toEqual(initialText.slice(start, end)); }); it('should call parent\'s insertAfter with new nodes', () => { const name = createInlineToolName('bold'); const start = 5; const end = 8; const fragments = node.format(name, start, end); expect(parentMock.insertAfter).toBeCalledWith(node, ...fragments); }); }); describe('.split()', () => { const index = 5; it('should not split (return null) if index is 0', () => { const newNode = node.split(0); expect(newNode).toBeNull(); }); it('should not split (return null) if index equals text length', () => { const newNode = node.split(initialText.length); expect(newNode).toBeNull(); }); it('should create new TextNode on split', () => { const newNode = node.split(index); expect(newNode).toBeInstanceOf(TextNode); }); it('should create new TextNode with text value splitted from the original one', () => { const newNode = node.split(index); expect(newNode?.getText()).toEqual(initialText.slice(index)); }); it('should remove split text value from the original node', () => { node.split(index); expect(node.getText()).toEqual(initialText.slice(0, index)); }); it('should insert new node to the parent', () => { const newNode = node.split(index); expect(parentMock.insertAfter).toBeCalledWith(node, newNode); }); }); describe('.serialized', () => { it('should return text value and empty array of fragments', () => { const result = node.serialized; expect(result).toEqual({ text: initialText, fragments: [], }); }); }); });
src/entities/TextNode/TextNode.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " const newNode = node.split(index);\n expect(parentMock.insertAfter).toBeCalledWith(node, newNode);\n });\n });\n describe('.format()', () => {\n const start = 3;\n const end = 5;\n it('should apply formatting to the relevant child', () => {\n node.format(anotherTool, start, end);\n expect(childMock.format).toBeCalledWith(anotherTool, start, end, undefined);", "score": 0.8819068670272827 }, { "filename": "src/entities/BlockNode/BlockNode.spec.ts", "retrieved_chunk": " },\n parent: {} as EditorDocument,\n });\n blockNode.serialized;\n spyArray.forEach((spy) => {\n expect(spy).toHaveBeenCalled();\n });\n });\n it('should call .serialized getter of all child TextNodes associated with the BlockNode', () => {\n const countOfTextNodes = 3;", "score": 0.8637586236000061 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " removeSpy.mockRestore();\n lengthSpy.mockRestore();\n });\n });\n describe('.getText()', () => {\n const start = 3;\n const end = 5;\n it('should call getText() for the relevant child', () => {\n node.getText(start, end);\n expect(childMock.getText).toBeCalledWith(start, end);", "score": 0.8580262660980225 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " });\n it('should return array of new formatting nodes', () => {\n const result = node.format(anotherTool, start, end);\n expect(result).toEqual(childMock.format(anotherTool, start, end));\n });\n });\n});", "score": 0.8569796085357666 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " },\n ],\n });\n });\n });\n describe('.insertText()', () => {\n const newText = 'new text';\n const index = 3;\n it('should lead calling insertText() of the child with the passed index', () => {\n node.insertText(newText, index);", "score": 0.8510861992835999 } ]
typescript
fragments = node.format(name, 0, initialText.length);
import { describe, it, expect, beforeEach } from '@jest/globals'; import { TextNode } from './index'; import { createInlineToolName, FormattingNode } from '../FormattingNode'; import type { ParentNode } from '../interfaces'; describe('TextNode', () => { const initialText = 'initial text'; const text = 'some text'; const parentMock = { insertAfter: jest.fn(), removeChild: jest.fn(), append: jest.fn(), children: [], } as unknown as ParentNode; let node: TextNode; beforeEach(() => { node = new TextNode({ value: initialText, parent: parentMock as FormattingNode, }); }); it('should have empty value by default', () => { node = new TextNode(); expect(node.getText()).toEqual(''); }); describe('.insertText()', () => { it('should set text to value if node is empty', () => { node = new TextNode(); node.insertText(text); expect(node.getText()).toEqual(text); }); it('should append text if not empty', () => { node.insertText(text); expect(node.getText()).toEqual(initialText + text); }); it('should prepend text if index is 0 and node is not empty', () => { node.insertText(text, 0); expect(node.getText()).toEqual(text + initialText); }); it('should insert text at index if not empty', () => { const index = 5; node.insertText(text, index); expect(node.getText()).toEqual(initialText.slice(0, index) + text + initialText.slice(index)); }); it('should throw an error if index is less than 0', () => { const f = (): void => node.insertText(text, -1); expect(f).toThrowError(); }); it('should throw an error if index is greater than node length', () => { const f = (): void => node.insertText(text, initialText.length + 1); expect(f).toThrowError(); }); }); describe('.getText()', () => { it('should return sliced value if start provided', () => { const start = 5; expect(node.getText(start)).toEqual(initialText.slice(start)); }); it('should return sliced value if end provided', () => { const end = 6; expect(node.getText(0, end)).toEqual(initialText.slice(0, end)); }); it('should return sliced value if full range provided', () => { const start = 3; const end = 9; expect(node.getText(start, end)).toEqual(initialText.slice(start, end)); }); it('should throw an error if start is invalid index', () => { expect(() => node.getText(-1)).toThrowError(); expect(() => node.getText(initialText.length + 1)).toThrowError(); }); it('should throw an error if end is invalid index', () => { expect(() => node.getText(0, initialText.length + 1)).toThrowError(); }); it('should throw an error if end index is greater than start index', () => { const start = 5; const end = 3; expect(() => node.getText(start, end)).toThrowError(); }); it('should not throw an error if end index is equal to start index', () => { const start = 5; const end = 5; expect(() => node.getText(start, end)).not.toThrowError(); }); }); describe('.removeText()', () => { it('should remove all text by default', () => { node.removeText(); expect(node.getText()).toEqual(''); }); it('should remove text from specified index', () => { const start = 3; node.removeText(start); expect(node.getText()).toEqual(initialText.slice(0, start)); }); it('should remove text from 0 to specified end index', () => { const end = 8; node.removeText(0, end); expect(node.getText()).toEqual(initialText.slice(end)); }); it('should remove text from specified start and end indecies', () => { const start = 3; const end = 8; node.removeText(start, end); expect(node.getText()).toEqual(initialText.slice(0, start) + initialText.slice(end)); }); it('should call remove() method if node is empty after 
removeText() call', () => {
jest.spyOn(node, 'remove');
node.removeText(); expect(node.remove).toBeCalled(); }); }); describe('.format()', () => { it('should return just one FormattingNode, if formatting full TextNode', () => { const name = createInlineToolName('bold'); const fragments = node.format(name, 0, initialText.length); expect(fragments).toHaveLength(1); expect(fragments[0]).toBeInstanceOf(FormattingNode); }); it('should return two fragments if formatting from the start, but not to the end', () => { const name = createInlineToolName('bold'); const end = 5; const fragments = node.format(name, 0, end); expect(fragments).toHaveLength(2); expect(fragments[0]).toBeInstanceOf(FormattingNode); expect(fragments[1]).toBeInstanceOf(TextNode); }); it('should return two fragments if formatting to the end, but not from the start', () => { const name = createInlineToolName('bold'); const start = 5; const fragments = node.format(name, start, initialText.length); expect(fragments).toHaveLength(2); expect(fragments[0]).toBeInstanceOf(TextNode); expect(fragments[1]).toBeInstanceOf(FormattingNode); }); it('should return three fragments if formatting in the middle', () => { const name = createInlineToolName('bold'); const start = 5; const end = 8; const fragments = node.format(name, start, end); // eslint-disable-next-line @typescript-eslint/no-magic-numbers expect(fragments).toHaveLength(3); expect(fragments[0]).toBeInstanceOf(TextNode); expect(fragments[1]).toBeInstanceOf(FormattingNode); expect(fragments[2]).toBeInstanceOf(TextNode); }); it('should return FormattingNode with a TextNode as a child with correct text value', () => { const name = createInlineToolName('bold'); const start = 5; const end = 8; const fragments = node.format(name, start, end); const formattingNode = fragments[1] as FormattingNode; expect(formattingNode.children[0].getText()).toEqual(initialText.slice(start, end)); }); it('should call parent\'s insertAfter with new nodes', () => { const name = createInlineToolName('bold'); const start = 5; const end = 8; const fragments = node.format(name, start, end); expect(parentMock.insertAfter).toBeCalledWith(node, ...fragments); }); }); describe('.split()', () => { const index = 5; it('should not split (return null) if index is 0', () => { const newNode = node.split(0); expect(newNode).toBeNull(); }); it('should not split (return null) if index equals text length', () => { const newNode = node.split(initialText.length); expect(newNode).toBeNull(); }); it('should create new TextNode on split', () => { const newNode = node.split(index); expect(newNode).toBeInstanceOf(TextNode); }); it('should create new TextNode with text value splitted from the original one', () => { const newNode = node.split(index); expect(newNode?.getText()).toEqual(initialText.slice(index)); }); it('should remove split text value from the original node', () => { node.split(index); expect(node.getText()).toEqual(initialText.slice(0, index)); }); it('should insert new node to the parent', () => { const newNode = node.split(index); expect(parentMock.insertAfter).toBeCalledWith(node, newNode); }); }); describe('.serialized', () => { it('should return text value and empty array of fragments', () => { const result = node.serialized; expect(result).toEqual({ text: initialText, fragments: [], }); }); }); });
src/entities/TextNode/TextNode.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " });\n });\n describe('.removeText()', () => {\n const start = 3;\n const end = 5;\n it('should remove text from appropriate child', () => {\n node.removeText(start, end);\n expect(childMock.removeText).toBeCalledWith(start, end);\n });\n it('should adjust indices by child offset', () => {", "score": 0.9139061570167542 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " removeSpy.mockRestore();\n lengthSpy.mockRestore();\n });\n });\n describe('.getText()', () => {\n const start = 3;\n const end = 5;\n it('should call getText() for the relevant child', () => {\n node.getText(start, end);\n expect(childMock.getText).toBeCalledWith(start, end);", "score": 0.9053713083267212 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " const newNode = node.split(index);\n expect(parentMock.insertAfter).toBeCalledWith(node, newNode);\n });\n });\n describe('.format()', () => {\n const start = 3;\n const end = 5;\n it('should apply formatting to the relevant child', () => {\n node.format(anotherTool, start, end);\n expect(childMock.format).toBeCalledWith(anotherTool, start, end, undefined);", "score": 0.876559853553772 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " });\n it('should adjust index by child offset', () => {\n const offset = childMock.length;\n node.getText(offset + start, offset + end);\n expect(anotherChildMock.getText).toBeCalledWith(start, end);\n });\n it('should call getText for all relevant children', () => {\n const offset = childMock.length;\n node.getText(start, offset + end);\n expect(childMock.getText).toBeCalledWith(start, offset);", "score": 0.8692690134048462 }, { "filename": "src/entities/FormattingNode/FormattingNode.spec.ts", "retrieved_chunk": " const offset = childMock.length;\n node.removeText(offset + start, offset + end);\n expect(anotherChildMock.removeText).toBeCalledWith(start, end);\n });\n it('should call removeText for each affected child', () => {\n const offset = childMock.length;\n node.removeText(start, offset + end);\n expect(childMock.removeText).toBeCalledWith(start, offset);\n expect(anotherChildMock.removeText).toBeCalledWith(0, end);\n });", "score": 0.8679492473602295 } ]
typescript
jest.spyOn(node, 'remove');
import { FormattingNode, InlineToolName, InlineToolData } from '../FormattingNode'; import { TextNodeConstructorParameters } from './types'; import { ChildNode, InlineNode, InlineNodeSerialized } from '../interfaces'; export * from './types'; export interface TextNode extends ChildNode {} /** * TextNode class represents a node in a tree-like structure, used to store and manipulate text content. */ @ChildNode export class TextNode implements InlineNode { /** * Private field representing the text content of the node */ #value: string; /** * Constructor for TextNode class * * @param args - TextNode constructor arguments. * @param args.value - Text content of the node. */ constructor({ value = '' }: TextNodeConstructorParameters = {}) { this.#value = value; } /** * Returns length of the text */ public get length(): number { return this.#value.length; } /** * Returns serialized value of the node */ public get serialized(): InlineNodeSerialized { return { text: this.getText(), // No fragments for text node fragments: [], }; } /** * Inserts text to specified position. By default, appends new text to the current value * * @param text - text to insert * @param [index] - char start index */ public insertText(text: string, index = this.length): void { this.#validateIndex(index); this.#value = this.#value.slice(0, index) + text + this.#value.slice(index); } /** * Remove text from specified range * * @param [start] - start char index of the range, 0 by default * @param [end] - end char index of the range, text length by default * @returns {string} removed text */ public removeText(start = 0, end = this.length): string { this.#validateIndex(start); this.#validateIndex(end); const removedValue = this.#value.slice(start, end); this.#value = this.#value.slice(0, start) + this.#value.slice(end); if (this.length === 0) { this.remove(); } return removedValue; } /** * Returns text value from the specified range * * @param [start] - start char index of the range, 0 by default * @param [end] - end char index of the range, text length by default */ public getText(start = 0, end = this.length): string { if (start > end) { // Stryker disable next-line StringLiteral throw new Error(`Start index ${start} should be less or equal than end index ${end}`); } this.#validateIndex(start); this.#validateIndex(end); return this.#value.slice(start, end); } /** * Applies inline tool for specified range * * @param tool - name of the tool to apply * @param start - start char index of the range * @param end - end char index of the range * @param [data] - inline tool data if applicable * @returns {InlineNode[]} - array of nodes after applied formatting */ public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] { this.#validateIndex(start); this.#validateIndex(end); const formattingNode = new FormattingNode({ tool, data, }); const fragments: ChildNode[] = []; /** * If start index is greater than 0, we need extract part of the text before the start index */ if (start > 0) { fragments.push(this.#cloneContents(0, start)); } /** * Formatting is applied to the specified range */ const formattedFragment = this.#cloneContents(start, end);
formattedFragment.appendTo(formattingNode);
fragments.push(formattingNode); /** * If end index is less than the text length, we need to extract part of the text after the end index */ if (end < this.length) { fragments.push(this.#cloneContents(end, this.length)); } this.parent?.insertAfter(this, ...fragments); this.remove(); return fragments; } /** * Splits current node into two nodes by the specified index * * @param index - char index where to split * @returns {TextNode|null} - new node or null if split is not applicable */ public split(index: number): TextNode | null { if (index === 0 || index === this.length) { return null; } const newNode = new TextNode(); const text = this.removeText(index); newNode.insertText(text); this.parent?.insertAfter(this, newNode); return newNode; } /** * Validates index * * @param index - char index to validate * @throws Error if index is out of the text length */ #validateIndex(index: number): void { if (index < 0 || index > this.length) { // Stryker disable next-line StringLiteral throw new Error(`Index ${index} is not in valid range [0, ${this.length}]`); } } /** * Clones specified range to a new TextNode * * @param start - start char index of the range * @param end - end char index of the range */ #cloneContents(start: number, end: number): TextNode { return new TextNode({ value: this.getText(start, end), }); } }
src/entities/TextNode/index.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/FormattingNode/index.ts", "retrieved_chunk": " * If child is not a FormattingNode, it doesn't include any fragments. So we skip it.\n */\n if (!(child instanceof FormattingNode)) {\n return acc;\n }\n acc.push(...child.getFragments(childStart, childEnd));\n return acc;\n },\n [ {\n tool: this.#tool,", "score": 0.8700141310691833 }, { "filename": "src/entities/FormattingNode/index.ts", "retrieved_chunk": " *\n * @param [start] - start char index of the range, by default 0\n * @param [end] - end char index of the range, by default length of the text value\n */\n public getFragments(start = 0, end = this.length): InlineFragment[] {\n return this.#reduceChildrenInRange<InlineFragment[]>(\n start,\n end,\n (acc, child, childStart, childEnd) => {\n /**", "score": 0.8619170784950256 }, { "filename": "src/entities/FormattingNode/index.ts", "retrieved_chunk": " }\n /**\n * Inserts text to the specified index, by default appends text to the end of the current value\n *\n * @param text - text to insert\n * @param [index] - char index where to insert text\n */\n public insertText(text: string, index = this.length): void {\n const [child, offset] = this.#findChildByIndex(index);\n child?.insertText(text, index - offset);", "score": 0.8511815667152405 }, { "filename": "src/entities/interfaces/InlineNode.ts", "retrieved_chunk": " * @param [start] - start char index of the range\n * @param [end] - end char index of the range\n * @param [data] - Inline Tool data if applicable\n * @returns {InlineNode[]} - array of nodes after applied formatting\n */\n format(name: InlineToolName, start?: number, end?: number, data?: InlineToolData): InlineNode[];\n /**\n * Inserts text at passed char index\n *\n * @param text - text to insert", "score": 0.8495162129402161 }, { "filename": "src/entities/FormattingNode/index.ts", "retrieved_chunk": " }\n /**\n * Removes text form the specified range\n *\n * @param [start] - start char index of the range, by default 0\n * @param [end] - end char index of the range, by default length of the text value\n * @returns {string} removed text\n */\n public removeText(start = 0, end = this.length): string {\n const result = this.#reduceChildrenInRange(", "score": 0.8451998233795166 } ]
typescript
formattedFragment.appendTo(formattingNode);
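A minimal sketch of format() on the TextNode above, matching the three-fragment case from the spec; the parent node and the import paths are illustrative assumptions.

import { TextNode } from '../TextNode';
import { FormattingNode, createInlineToolName } from '../FormattingNode';

// A parent is attached so format() can swap the node for its fragments
const parent = new FormattingNode({ tool: createInlineToolName('italic') });
const node = new TextNode({ value: 'initial text', parent });

const fragments = node.format(createInlineToolName('bold'), 5, 8);
// fragments[0] — TextNode 'initi'
// fragments[1] — FormattingNode('bold') wrapping a TextNode 'al '
// fragments[2] — TextNode 'text'
// parent.children now holds these three fragments instead of the original node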
import { FormattingNode, InlineToolName, InlineToolData } from '../FormattingNode'; import { TextNodeConstructorParameters } from './types'; import { ChildNode, InlineNode, InlineNodeSerialized } from '../interfaces'; export * from './types'; export interface TextNode extends ChildNode {} /** * TextNode class represents a node in a tree-like structure, used to store and manipulate text content. */ @ChildNode export class TextNode implements InlineNode { /** * Private field representing the text content of the node */ #value: string; /** * Constructor for TextNode class * * @param args - TextNode constructor arguments. * @param args.value - Text content of the node. */ constructor({ value = '' }: TextNodeConstructorParameters = {}) { this.#value = value; } /** * Returns length of the text */ public get length(): number { return this.#value.length; } /** * Returns serialized value of the node */ public get serialized(): InlineNodeSerialized { return { text: this.getText(), // No fragments for text node fragments: [], }; } /** * Inserts text to specified position. By default, appends new text to the current value * * @param text - text to insert * @param [index] - char start index */ public insertText(text: string, index = this.length): void { this.#validateIndex(index); this.#value = this.#value.slice(0, index) + text + this.#value.slice(index); } /** * Remove text from specified range * * @param [start] - start char index of the range, 0 by default * @param [end] - end char index of the range, text length by default * @returns {string} removed text */ public removeText(start = 0, end = this.length): string { this.#validateIndex(start); this.#validateIndex(end); const removedValue = this.#value.slice(start, end); this.#value = this.#value.slice(0, start) + this.#value.slice(end); if (this.length === 0) { this.remove(); } return removedValue; } /** * Returns text value from the specified range * * @param [start] - start char index of the range, 0 by default * @param [end] - end char index of the range, text length by default */ public getText(start = 0, end = this.length): string { if (start > end) { // Stryker disable next-line StringLiteral throw new Error(`Start index ${start} should be less or equal than end index ${end}`); } this.#validateIndex(start); this.#validateIndex(end); return this.#value.slice(start, end); } /** * Applies inline tool for specified range * * @param tool - name of the tool to apply * @param start - start char index of the range * @param end - end char index of the range * @param [data] - inline tool data if applicable * @returns {InlineNode[]} - array of nodes after applied formatting */ public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] { this.#validateIndex(start); this.#validateIndex(end); const formattingNode = new FormattingNode({ tool, data, }); const fragments: ChildNode[] = []; /** * If start index is greater than 0, we need extract part of the text before the start index */ if (start > 0) { fragments.push(this.#cloneContents(0, start)); } /** * Formatting is applied to the specified range */ const formattedFragment = this.#cloneContents(start, end); formattedFragment.appendTo(formattingNode);
fragments.push(formattingNode);
/** * If end index is less than the text length, we need to extract part of the text after the end index */ if (end < this.length) { fragments.push(this.#cloneContents(end, this.length)); } this.parent?.insertAfter(this, ...fragments); this.remove(); return fragments; } /** * Splits current node into two nodes by the specified index * * @param index - char index where to split * @returns {TextNode|null} - new node or null if split is not applicable */ public split(index: number): TextNode | null { if (index === 0 || index === this.length) { return null; } const newNode = new TextNode(); const text = this.removeText(index); newNode.insertText(text); this.parent?.insertAfter(this, newNode); return newNode; } /** * Validates index * * @param index - char index to validate * @throws Error if index is out of the text length */ #validateIndex(index: number): void { if (index < 0 || index > this.length) { // Stryker disable next-line StringLiteral throw new Error(`Index ${index} is not in valid range [0, ${this.length}]`); } } /** * Clones specified range to a new TextNode * * @param start - start char index of the range * @param end - end char index of the range */ #cloneContents(start: number, end: number): TextNode { return new TextNode({ value: this.getText(start, end), }); } }
src/entities/TextNode/index.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/FormattingNode/index.ts", "retrieved_chunk": " * If child is not a FormattingNode, it doesn't include any fragments. So we skip it.\n */\n if (!(child instanceof FormattingNode)) {\n return acc;\n }\n acc.push(...child.getFragments(childStart, childEnd));\n return acc;\n },\n [ {\n tool: this.#tool,", "score": 0.8454420566558838 }, { "filename": "src/entities/FormattingNode/index.ts", "retrieved_chunk": " if (splitNode || (index - offset === childLength)) {\n midNodeIndex += 1;\n }\n newNode.append(...this.children.slice(midNodeIndex));\n this.parent?.insertAfter(this, newNode);\n return newNode;\n }\n /**\n * Applies formatting to the text with specified inline tool in the specified range\n *", "score": 0.8291147947311401 }, { "filename": "src/entities/FormattingNode/index.ts", "retrieved_chunk": " return this.children.reduce((sum, child) => sum + child.length, 0);\n }\n /**\n * Returns serialized value of the node: text and formatting fragments\n */\n public get serialized(): InlineNodeSerialized {\n return {\n text: this.getText(),\n fragments: this.getFragments(),\n };", "score": 0.8081046938896179 }, { "filename": "src/entities/TextNode/TextNode.spec.ts", "retrieved_chunk": " });\n describe('.format()', () => {\n it('should return just one FormattingNode, if formatting full TextNode', () => {\n const name = createInlineToolName('bold');\n const fragments = node.format(name, 0, initialText.length);\n expect(fragments).toHaveLength(1);\n expect(fragments[0]).toBeInstanceOf(FormattingNode);\n });\n it('should return two fragments if formatting from the start, but not to the end', () => {\n const name = createInlineToolName('bold');", "score": 0.8070468902587891 }, { "filename": "src/entities/FormattingNode/index.ts", "retrieved_chunk": " */\n public split(index: number): FormattingNode | null {\n if (index === 0 || index === this.length) {\n return null;\n }\n const newNode = new FormattingNode({\n tool: this.#tool,\n data: this.#data,\n });\n const [child, offset] = this.#findChildByIndex(index);", "score": 0.8044763207435608 } ]
typescript
fragments.push(formattingNode);
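The same TextNode also supports split(); a brief sketch of that path, using the same illustrative parent and imports as the previous sketch.

import { TextNode } from '../TextNode';
import { FormattingNode, createInlineToolName } from '../FormattingNode';

const parent = new FormattingNode({ tool: createInlineToolName('italic') });
const node = new TextNode({ value: 'initial text', parent });

const right = node.split(5);
// node.getText()   === 'initi'
// right?.getText() === 'al text'
// parent.children  === [node, right]
// split(0) and split(node.length) return null and leave the tree untouched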
import { describe, it, expect, beforeEach, jest } from '@jest/globals'; import { ChildNode } from './ChildNode'; import type { ParentNode } from './ParentNode'; const parentMock = { append: jest.fn(), removeChild: jest.fn(), insertAfter: jest.fn(), children: [], } as unknown as ParentNode; interface Dummy extends ChildNode { } /** * Dummy Node's class */ @ChildNode class Dummy { /** * * @param _options - dummy options */ // eslint-disable-next-line @typescript-eslint/no-empty-function,@typescript-eslint/no-unused-vars,no-unused-vars constructor(_options?: unknown) {} } describe('ChildNode decorator', () => { let dummy: Dummy; beforeEach(() => { jest.resetAllMocks(); }); it('should append decorated class to a parent', () => { dummy = new Dummy({ parent: parentMock }); expect(parentMock.append).toBeCalledWith(dummy); }); it('should add remove method to the decorated class', () => { expect
(dummy.remove).toBeInstanceOf(Function);
}); it('should add appendTo method to the decorated class', () => { expect(dummy.appendTo).toBeInstanceOf(Function); }); describe('.parent', () => { it('should return null by default', () => { dummy = new Dummy(); expect(dummy.parent).toBeNull(); }); it('should return parent passed via constructor', () => { dummy = new Dummy({ parent: parentMock }); expect(dummy.parent).toEqual(parentMock); }); }); describe('.remove()', () => { beforeEach(() => { dummy = new Dummy({ parent: parentMock, }); }); it('should call parent\'s removeChild method', () => { dummy.remove(); expect(parentMock.removeChild).toBeCalledWith(dummy); }); it('should set node\'s parent to null', () => { dummy.remove(); expect(dummy.parent).toBeNull(); }); }); describe('.appendTo()', () => { beforeEach(() => { dummy = new Dummy(); }); it('should call parent\'s append method on appendTo call', () => { dummy.appendTo(parentMock); expect(parentMock.append).toBeCalledWith(dummy); }); it('should set node\'s parent on appendTo call', () => { dummy.appendTo(parentMock); expect(dummy.parent).toBe(parentMock); }); it('should do nothing if parents are the same', () => { const dummyWithParent = new Dummy({ parent: parentMock, }); jest.resetAllMocks(); dummyWithParent.appendTo(parentMock); expect(parentMock.append).not.toBeCalled(); }); }); });
src/entities/interfaces/ChildNode.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/interfaces/ParentNode.spec.ts", "retrieved_chunk": " it('should add removeChild method to the decorated class', () => {\n expect(dummy.removeChild).toBeInstanceOf(Function);\n });\n it('should add append method to the decorated class', () => {\n expect(dummy.append).toBeInstanceOf(Function);\n });\n it('should add insertAfter method to the decorated class', () => {\n expect(dummy.insertAfter).toBeInstanceOf(Function);\n });\n describe('constructor', () => {", "score": 0.9135858416557312 }, { "filename": "src/entities/interfaces/ParentNode.spec.ts", "retrieved_chunk": " it('should append passed children to new parent', () => {\n const childMock = createChildMock();\n dummy = new Dummy({\n children: [ childMock ],\n });\n expect(childMock.appendTo).toBeCalledWith(dummy);\n });\n });\n describe('.children', () => {\n it('should return empty array by default', () => {", "score": 0.887922465801239 }, { "filename": "src/entities/interfaces/ParentNode.spec.ts", "retrieved_chunk": " dummy = new Dummy({\n children: [ childMock ],\n });\n dummy.removeChild(childMock);\n expect(dummy.children).toHaveLength(0);\n });\n it('should call remove method of child', () => {\n const childMock = createChildMock();\n dummy = new Dummy({\n children: [ childMock ],", "score": 0.8851749897003174 }, { "filename": "src/entities/interfaces/ParentNode.spec.ts", "retrieved_chunk": " expect(dummy.children).toEqual([]);\n });\n it('should return children passed via constructor', () => {\n const childMock = createChildMock();\n dummy = new Dummy({\n children: [ childMock ],\n });\n expect(dummy.children).toEqual([ childMock ]);\n });\n });", "score": 0.8833327293395996 }, { "filename": "src/entities/interfaces/integration.spec.ts", "retrieved_chunk": " let anotherParent: DummyParent;\n let child: DummyChild;\n beforeEach(() => {\n parent = new DummyParent();\n child = new DummyChild({ parent });\n anotherParent = new DummyParent();\n });\n it('should remove child from the old parent on new parent.append() call', () => {\n anotherParent.append(child);\n expect(parent.children).not.toContain(child);", "score": 0.8816500902175903 } ]
typescript
(dummy.remove).toBeInstanceOf(Function);
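A small sketch of the parent/child contract the decorators set up, using illustrative DummyParent/DummyChild classes in the spirit of the integration spec referenced in the retrieved chunks; the exact decorator typing and zero-argument constructors are assumptions.

import { ChildNode } from './ChildNode';
import { ParentNode } from './ParentNode';

interface DummyParent extends ParentNode {}
interface DummyChild extends ChildNode {}

/** Illustrative parent node */
@ParentNode
class DummyParent {}

/** Illustrative child node */
@ChildNode
class DummyChild {}

const parent = new DummyParent();
const child = new DummyChild();

child.appendTo(parent); // parent.append(child) is called and child.parent === parent
child.remove();         // parent.removeChild(child) is called and child.parent === null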
import { describe, it, expect, beforeEach, jest } from '@jest/globals'; import { ParentNode } from './ParentNode'; import type { ChildNode } from './ChildNode'; const createChildMock = (): ChildNode => { return { appendTo: jest.fn(), remove: jest.fn(), parent: null, } as unknown as ChildNode; }; interface Dummy extends ParentNode { } /** * */ @ParentNode class Dummy { /** * * @param _options - dummy options */ // eslint-disable-next-line @typescript-eslint/no-empty-function,@typescript-eslint/no-unused-vars,no-unused-vars constructor(_options?: unknown) {} } describe('ParentNode decorator', () => { let dummy: Dummy; beforeEach(() => { dummy = new Dummy(); jest.resetAllMocks(); }); it('should add removeChild method to the decorated class', () => { expect(dummy.removeChild).toBeInstanceOf(Function); }); it('should add append method to the decorated class', () => { expect(dummy
.append).toBeInstanceOf(Function);
  });
  it('should add insertAfter method to the decorated class', () => {
    expect(dummy.insertAfter).toBeInstanceOf(Function);
  });
  describe('constructor', () => {
    it('should append passed children to new parent', () => {
      const childMock = createChildMock();
      dummy = new Dummy({
        children: [ childMock ],
      });
      expect(childMock.appendTo).toBeCalledWith(dummy);
    });
  });
  describe('.children', () => {
    it('should return empty array by default', () => {
      expect(dummy.children).toEqual([]);
    });
    it('should return children passed via constructor', () => {
      const childMock = createChildMock();
      dummy = new Dummy({
        children: [ childMock ],
      });
      expect(dummy.children).toEqual([ childMock ]);
    });
  });
  describe('.append()', () => {
    it('should add child to the children array', () => {
      const childMock = createChildMock();
      dummy.append(childMock);
      expect(dummy.children).toContain(childMock);
    });
    it('should add several children to the children array', () => {
      const childMock = createChildMock();
      const anotherChildMock = createChildMock();
      dummy.append(childMock, anotherChildMock);
      expect(dummy.children).toEqual([childMock, anotherChildMock]);
    });
    it('should move a child to the end of children array if it is already there', () => {
      const childMock = createChildMock();
      const anotherChildMock = createChildMock();
      const oneMoreChildMock = createChildMock();
      dummy = new Dummy({
        children: [childMock, anotherChildMock, oneMoreChildMock],
      });
      dummy.append(anotherChildMock);
      expect(dummy.children).toEqual([childMock, oneMoreChildMock, anotherChildMock]);
    });
    it('should preserve already existing children', () => {
      const childMock = createChildMock();
      const anotherChildMock = createChildMock();
      const oneMoreChildMock = createChildMock();
      dummy = new Dummy({
        children: [childMock, anotherChildMock],
      });
      dummy.append(oneMoreChildMock);
      expect(dummy.children).toEqual([childMock, anotherChildMock, oneMoreChildMock]);
    });
  });
  describe('.insertAfter()', () => {
    it('should insert a child after passed target', () => {
      const childMock = createChildMock();
      const anotherChildMock = createChildMock();
      const childMockToInsert = createChildMock();
      dummy = new Dummy({
        children: [childMock, anotherChildMock],
      });
      dummy.insertAfter(childMock, childMockToInsert);
      expect(dummy.children).toEqual([childMock, childMockToInsert, anotherChildMock]);
    });
    it('should insert several children after passed target', () => {
      const childMock = createChildMock();
      const anotherChildMock = createChildMock();
      const childMockToInsert = createChildMock();
      const anotherChildMockToInsert = createChildMock();
      dummy = new Dummy({
        children: [childMock, anotherChildMock],
      });
      dummy.insertAfter(childMock, childMockToInsert, anotherChildMockToInsert);
      expect(dummy.children).toEqual([childMock, childMockToInsert, anotherChildMockToInsert, anotherChildMock]);
    });
    it('should remove existing child and insert it to the new place', () => {
      const childMock = createChildMock();
      const anotherChildMock = createChildMock();
      const oneMoreChildMock = createChildMock();
      const childMockToInsert = createChildMock();
      dummy = new Dummy({
        children: [childMock, anotherChildMock, oneMoreChildMock, childMockToInsert],
      });
      dummy.insertAfter(anotherChildMock, childMockToInsert);
      expect(dummy.children).toEqual([childMock, anotherChildMock, childMockToInsert, oneMoreChildMock]);
    });
  });
  describe('.removeChild()', () => {
    it('should remove child from the children array', () => {
      const childMock = createChildMock();
      dummy = new Dummy({
        children: [ childMock ],
      });
      dummy.removeChild(childMock);
      expect(dummy.children).toHaveLength(0);
    });
    it('should call remove method of child', () => {
      const childMock = createChildMock();
      dummy = new Dummy({
        children: [ childMock ],
      });
      dummy.removeChild(childMock);
      expect(childMock.remove).toBeCalled();
    });
  });
});
src/entities/interfaces/ParentNode.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/interfaces/ChildNode.spec.ts", "retrieved_chunk": " expect(parentMock.append).toBeCalledWith(dummy);\n });\n it('should add remove method to the decorated class', () => {\n expect(dummy.remove).toBeInstanceOf(Function);\n });\n it('should add appendTo method to the decorated class', () => {\n expect(dummy.appendTo).toBeInstanceOf(Function);\n });\n describe('.parent', () => {\n it('should return null by default', () => {", "score": 0.9545899033546448 }, { "filename": "src/entities/interfaces/ChildNode.spec.ts", "retrieved_chunk": " dummy = new Dummy({\n parent: parentMock,\n });\n });\n it('should call parent\\'s removeChild method', () => {\n dummy.remove();\n expect(parentMock.removeChild).toBeCalledWith(dummy);\n });\n it('should set node\\'s parent to null', () => {\n dummy.remove();", "score": 0.9063429832458496 }, { "filename": "src/entities/interfaces/ChildNode.spec.ts", "retrieved_chunk": " dummy = new Dummy();\n expect(dummy.parent).toBeNull();\n });\n it('should return parent passed via constructor', () => {\n dummy = new Dummy({ parent: parentMock });\n expect(dummy.parent).toEqual(parentMock);\n });\n });\n describe('.remove()', () => {\n beforeEach(() => {", "score": 0.9014962911605835 }, { "filename": "src/entities/interfaces/ChildNode.spec.ts", "retrieved_chunk": " });\n it('should set node\\'s parent on appendTo call', () => {\n dummy.appendTo(parentMock);\n expect(dummy.parent).toBe(parentMock);\n });\n it('should do nothing if parents are the same', () => {\n const dummyWithParent = new Dummy({\n parent: parentMock,\n });\n jest.resetAllMocks();", "score": 0.8981985449790955 }, { "filename": "src/entities/interfaces/ChildNode.spec.ts", "retrieved_chunk": " expect(dummy.parent).toBeNull();\n });\n });\n describe('.appendTo()', () => {\n beforeEach(() => {\n dummy = new Dummy();\n });\n it('should call parent\\'s append method on appendTo call', () => {\n dummy.appendTo(parentMock);\n expect(parentMock.append).toBeCalledWith(dummy);", "score": 0.8966317176818848 } ]
typescript
.append).toBeInstanceOf(Function);
import { describe, it, expect, beforeEach, jest } from '@jest/globals'; import { ParentNode } from './ParentNode'; import type { ChildNode } from './ChildNode'; const createChildMock = (): ChildNode => { return { appendTo: jest.fn(), remove: jest.fn(), parent: null, } as unknown as ChildNode; }; interface Dummy extends ParentNode { } /** * */ @ParentNode class Dummy { /** * * @param _options - dummy options */ // eslint-disable-next-line @typescript-eslint/no-empty-function,@typescript-eslint/no-unused-vars,no-unused-vars constructor(_options?: unknown) {} } describe('ParentNode decorator', () => { let dummy: Dummy; beforeEach(() => { dummy = new Dummy(); jest.resetAllMocks(); }); it('should add removeChild method to the decorated class', () => { expect(dummy.removeChild).toBeInstanceOf(Function); }); it('should add append method to the decorated class', () => { expect(dummy.append).toBeInstanceOf(Function); }); it('should add insertAfter method to the decorated class', () => { expect(dummy.insertAfter).toBeInstanceOf(Function); }); describe('constructor', () => { it('should append passed children to new parent', () => { const childMock = createChildMock(); dummy = new Dummy({ children: [ childMock ], }); expect(childMock
.appendTo).toBeCalledWith(dummy);
}); }); describe('.children', () => { it('should return empty array by default', () => { expect(dummy.children).toEqual([]); }); it('should return children passed via constructor', () => { const childMock = createChildMock(); dummy = new Dummy({ children: [ childMock ], }); expect(dummy.children).toEqual([ childMock ]); }); }); describe('.append()', () => { it('should add child to the children array', () => { const childMock = createChildMock(); dummy.append(childMock); expect(dummy.children).toContain(childMock); }); it('should add several children to the children array', () => { const childMock = createChildMock(); const anotherChildMock = createChildMock(); dummy.append(childMock, anotherChildMock); expect(dummy.children).toEqual([childMock, anotherChildMock]); }); it('should move a child to the end of children array if it is already there', () => { const childMock = createChildMock(); const anotherChildMock = createChildMock(); const oneMoreChildMock = createChildMock(); dummy = new Dummy({ children: [childMock, anotherChildMock, oneMoreChildMock], }); dummy.append(anotherChildMock); expect(dummy.children).toEqual([childMock, oneMoreChildMock, anotherChildMock]); }); it('should preserve already existing children', () => { const childMock = createChildMock(); const anotherChildMock = createChildMock(); const oneMoreChildMock = createChildMock(); dummy = new Dummy({ children: [childMock, anotherChildMock], }); dummy.append(oneMoreChildMock); expect(dummy.children).toEqual([childMock, anotherChildMock, oneMoreChildMock]); }); }); describe('.insertAfter()', () => { it('should insert a child after passed target', () => { const childMock = createChildMock(); const anotherChildMock = createChildMock(); const childMockToInsert = createChildMock(); dummy = new Dummy({ children: [childMock, anotherChildMock], }); dummy.insertAfter(childMock, childMockToInsert); expect(dummy.children).toEqual([childMock, childMockToInsert, anotherChildMock]); }); it('should insert several children after passed target', () => { const childMock = createChildMock(); const anotherChildMock = createChildMock(); const childMockToInsert = createChildMock(); const anotherChildMockToInsert = createChildMock(); dummy = new Dummy({ children: [childMock, anotherChildMock], }); dummy.insertAfter(childMock, childMockToInsert, anotherChildMockToInsert); expect(dummy.children).toEqual([childMock, childMockToInsert, anotherChildMockToInsert, anotherChildMock]); }); it('should remove existing child and insert it to the new place', () => { const childMock = createChildMock(); const anotherChildMock = createChildMock(); const oneMoreChildMock = createChildMock(); const childMockToInsert = createChildMock(); dummy = new Dummy({ children: [childMock, anotherChildMock, oneMoreChildMock, childMockToInsert], }); dummy.insertAfter(anotherChildMock, childMockToInsert); expect(dummy.children).toEqual([childMock, anotherChildMock, childMockToInsert, oneMoreChildMock]); }); }); describe('.removeChild()', () => { it('should remove child from the children array', () => { const childMock = createChildMock(); dummy = new Dummy({ children: [ childMock ], }); dummy.removeChild(childMock); expect(dummy.children).toHaveLength(0); }); it('should call remove method of child', () => { const childMock = createChildMock(); dummy = new Dummy({ children: [ childMock ], }); dummy.removeChild(childMock); expect(childMock.remove).toBeCalled(); }); }); });
src/entities/interfaces/ParentNode.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/interfaces/ChildNode.spec.ts", "retrieved_chunk": " expect(dummy.parent).toBeNull();\n });\n });\n describe('.appendTo()', () => {\n beforeEach(() => {\n dummy = new Dummy();\n });\n it('should call parent\\'s append method on appendTo call', () => {\n dummy.appendTo(parentMock);\n expect(parentMock.append).toBeCalledWith(dummy);", "score": 0.9250514507293701 }, { "filename": "src/entities/interfaces/integration.spec.ts", "retrieved_chunk": " let anotherParent: DummyParent;\n let child: DummyChild;\n beforeEach(() => {\n parent = new DummyParent();\n child = new DummyChild({ parent });\n anotherParent = new DummyParent();\n });\n it('should remove child from the old parent on new parent.append() call', () => {\n anotherParent.append(child);\n expect(parent.children).not.toContain(child);", "score": 0.9154282808303833 }, { "filename": "src/entities/interfaces/ChildNode.spec.ts", "retrieved_chunk": " });\n it('should set node\\'s parent on appendTo call', () => {\n dummy.appendTo(parentMock);\n expect(dummy.parent).toBe(parentMock);\n });\n it('should do nothing if parents are the same', () => {\n const dummyWithParent = new Dummy({\n parent: parentMock,\n });\n jest.resetAllMocks();", "score": 0.9149840474128723 }, { "filename": "src/entities/interfaces/integration.spec.ts", "retrieved_chunk": " describe('child removal', () => {\n let parent: DummyParent;\n let child: DummyChild;\n beforeEach(() => {\n parent = new DummyParent();\n child = new DummyChild({ parent });\n });\n it('should remove child from parent on child.remove() call', () => {\n child.remove();\n expect(parent.children).not.toContain(child);", "score": 0.909160852432251 }, { "filename": "src/entities/interfaces/ChildNode.spec.ts", "retrieved_chunk": " dummy = new Dummy({\n parent: parentMock,\n });\n });\n it('should call parent\\'s removeChild method', () => {\n dummy.remove();\n expect(parentMock.removeChild).toBeCalledWith(dummy);\n });\n it('should set node\\'s parent to null', () => {\n dummy.remove();", "score": 0.9079182147979736 } ]
typescript
.appendTo).toBeCalledWith(dummy);
import glob from 'fast-glob'; import { CliUtilityService, Command, CommandRunner, Help, Option } from 'nest-commander'; import pQueue from 'p-queue'; import path from 'path'; import { setTimeout as delay } from 'timers/promises'; import { Logger } from '@nestjs/common'; import { AccountsImportService } from '../../modules/accounts-import/accounts-import.service'; import { ExportSessionsService } from '../../modules/export-sessions/export-sessions.service'; import { ProxiesImportService } from '../../modules/proxies-import/proxies-import.service'; import { ProxiesService } from '../../modules/proxies/proxies.service'; import { SecretsImportService } from '../../modules/secrets-import/secrets-import.service'; import { CreateSessionsService } from './create-sessions.service'; interface CreateCommandOptions { accounts: string | string[]; secrets: string | string[]; proxies: string | string[]; concurrency: number; output: string; overwrite: boolean; } @Command({ name: 'create', description: 'Creates new sessions', }) export class CreateSessionsCommand extends CommandRunner { private readonly logger = new Logger(CreateSessionsCommand.name); constructor( private readonly createSessionsService: CreateSessionsService, private readonly exportSessionsService: ExportSessionsService, private readonly accountsImportService: AccountsImportService, private readonly secretsImportService: SecretsImportService, private readonly proxiesImportService: ProxiesImportService, private readonly proxiesService: ProxiesService, ) { super(); } public async run(args: string[], options: CreateCommandOptions) { try { const accountsOptionInput = await this.normalizeInput(options.accounts); let accounts = await this.accountsImportService.loadAccounts(accountsOptionInput); if (accounts.length === 0) throw new Error('No accounts found'); this.logger.log(`Accounts: ${accounts.length}`); const secretsOptionInput = await this.normalizeInput(options.secrets);
const secrets = await this.secretsImportService.loadSecrets(secretsOptionInput);
this.logger.log(`Secrets: ${secrets.length}`); const outputOptionInput = options.output; if (!outputOptionInput) throw new Error('Output path is required'); const output = path.resolve(outputOptionInput); await this.exportSessionsService.setOutputPath(output); this.logger.log(`Output: ${output}`); const overwriteExistingSessions = options.overwrite; if (!overwriteExistingSessions) { const sessionsPaths = await this.normalizeInput(`${output}/*`); const existingSessions = await this.accountsImportService.loadAccounts(sessionsPaths); this.logger.log(`Ignoring existing sessions: ${existingSessions.length}`); accounts = accounts.filter((account) => !existingSessions.some((a) => a.username === account.username)); } if (accounts.length === 0) { this.logger.log('No accounts to create'); return; } const proxiesOptionInput = await this.normalizeInput(options.proxies); const proxies = await this.proxiesImportService.loadProxies(proxiesOptionInput); this.proxiesService.setProxies(proxies); this.logger.log(`Proxies: ${proxies.length}`); const concurrencyOptionInput = options.concurrency; const concurrency = proxies.length > 0 ? concurrencyOptionInput || Math.min(proxies.length * 3, 100) : 1; this.logger.log(`Concurrency: ${concurrency}`); this.logger.log(`Starting to create sessions for ${accounts.length} accounts`); this.accountsImportService.assignSecretsToAccounts(accounts, secrets); let success = 0; let fails = 0; let left = accounts.length; const queue = new pQueue({ concurrency, interval: 10, intervalCap: 1 }); for (const account of accounts) { queue.add(async () => { try { const session = await this.createSessionsService.createSession(account); await this.exportSessionsService.exportSession(session); success++; this.logger.log(`Success: ${account.username}, left: ${--left}`); } catch (error) { fails++; this.logger.warn(`Fail: ${account.username}, left: ${--left}`); } }); } await queue.onIdle(); this.logger.log(`Session creation complete`); this.logger.log(`Success: ${success}`); this.logger.log(`Fails: ${fails}`); await delay(1000); } catch (error) { this.logger.error(error.message); } } private async normalizeInput(input: string | string[]) { if (!input) return []; if (!Array.isArray(input)) input = [input]; const filteredInput = input.filter((el) => typeof el === 'string' && el.trim() !== ''); if (filteredInput.length === 0) return []; const nestedData = await Promise.all( filteredInput.map(async (el) => { el = el.trim(); // Possible glob pattern const files = await glob(el); if (files.length > 0) return files; // Possible string return el.split(/\s+|\r?\n/).map((line) => line.trim()); }), ); return nestedData.flat(); } @Option({ required: true, flags: '-a, --accounts <accounts...>', description: `Specify one or more accounts. Account can be specified as: - A simple string. - A file path to load accounts from (one account per line). - A glob pattern to load accounts from multiple files. Supported formats: - username:password - username:password:sharedSecret - username:password:sharedSecret:identitySecret - ASF json`, }) private parseAccountsOption(val: string, accumulator: string[] = []) { accumulator.push(val); return accumulator; } @Option({ flags: '-s, --secrets <secrets...>', description: `Specify one or more secrets. Secret can be specified as: - A file path to load secrets from file. - A glob pattern to load secrets from multiple files. 
Supported formats: - maFile - ASF db`, }) private parseSecretsOption(val: string, accumulator: string[] = []) { accumulator.push(val); return accumulator; } @Option({ flags: '-p, --proxies <proxies...>', description: `Specify one or more proxies. Proxy can be specified as: - A string in the format <protocol>://<username>:<password>@<host>:<port>. - A file path to load proxies from a text file. Supported protocols: - http - https`, }) private parseProxiesOption(val: string, accumulator: string[] = []) { accumulator.push(val); return accumulator; } @Option({ flags: '-c, --concurrency <concurrency>', description: `Specify the number of concurrent runs. Default: 1, or the number of proxies.`, }) private parseConcurrencyOption(val: string) { const parsed = parseInt(val, 10); if (Number.isNaN(parsed)) throw new Error('Concurrency must be a number'); if (parsed < 1) throw new Error('Concurrency must be greater than 0'); return parsed; } @Option({ flags: '-o, --output <output>', description: 'Specify the output directory.', defaultValue: './sessions', }) private parseOutputOption(val: string) { return val; } @Option({ flags: '--overwrite (-w)', description: 'Overwrite existing sessions.', defaultValue: false, }) private parseOverwriteOption(val: string) { return new CliUtilityService().parseBoolean(val); } @Help('afterAll') private displayExamples() { return ` Examples: create -a accounts.txt -s ./secrets -p proxies.txt create -a username:password -p proxies.txt`; } }
src/commands/create/create-sessions.command.ts
Sadzurami-steam-sessions-creator-97b7294
[ { "filename": "src/commands/validate/validate-sessions.command.ts", "retrieved_chunk": " public async run(args: string[], options: ValidateCommandOptions) {\n try {\n const sessionsOptionInput = await this.normalizeInput(options.sessions);\n const sessions = await this.sessionsImportService.loadSessions(sessionsOptionInput);\n if (sessions.length === 0) throw new Error('No sessions found');\n this.logger.log(`Sessions: ${sessions.length}`);\n await this.validateSessionsService.validateSessions(sessions);\n } catch (error) {\n this.logger.error(error.message);\n }", "score": 0.870503306388855 }, { "filename": "src/modules/accounts-import/accounts-import.service.ts", "retrieved_chunk": " for (const account of accounts) map.set(account.username, account);\n return [...map.values()];\n }\n private async readAccountsFromInput(input: string) {\n const inputType = await this.inferInputType(input);\n if (inputType === 'file') return this.readAccountsFromFile(input);\n if (inputType === 'string') return this.readAccountFromString(input);\n if (inputType === 'directory') return { values: [], errors: [input] };\n }\n private async readAccountsFromFile(filePath: string) {", "score": 0.8106663823127747 }, { "filename": "src/modules/accounts-import/accounts-import.service.ts", "retrieved_chunk": " if (!Array.isArray(input)) input = [input];\n if (input.length === 0) return [];\n let accounts: Account[] = [];\n const errors: string[] = [];\n const readResults = await Promise.all(input.map((input) => this.readAccountsFromInput(input)));\n for (const result of readResults) {\n accounts.push(...result.values);\n errors.push(...result.errors);\n }\n accounts = this.removeDuplicates(accounts);", "score": 0.8103427290916443 }, { "filename": "src/modules/secrets-import/secrets-import.service.ts", "retrieved_chunk": " public async loadSecrets(input: string[] | string) {\n if (!input) return [];\n if (!Array.isArray(input)) input = [input];\n if (input.length === 0) return [];\n let secrets: Secrets[] = [];\n const errors: string[] = [];\n const readResults = await Promise.all(input.map((input) => this.readSecretsFromInput(input)));\n for (const result of readResults) {\n secrets.push(...result.values);\n errors.push(...result.errors);", "score": 0.8065593242645264 }, { "filename": "src/modules/accounts-import/accounts-import.service.ts", "retrieved_chunk": " }\n return accounts;\n }\n public assignSecretsToAccounts(accounts: Account[], secrets: Secrets[]) {\n const secretsMap = new Map<string, Secrets>();\n for (const secret of secrets) {\n secretsMap.set(secret.username, secret);\n // some existing steam-oriented apps are case-insensitive to usernames in secrets\n secretsMap.set(secret.username.toLowerCase(), secret);\n }", "score": 0.7877228260040283 } ]
typescript
const secrets = await this.secretsImportService.loadSecrets(secretsOptionInput);
import fs from 'fs/promises'; import inquirer from 'inquirer'; import pQueue from 'p-queue'; import { setTimeout as delay } from 'timers/promises'; import { Injectable, Logger } from '@nestjs/common'; import { Account as IAccount } from '../../interfaces/account.interface'; import { Secrets } from '../../interfaces/secrets.interface'; class Account implements IAccount { public readonly username: string; public readonly password: string; public sharedSecret: string | null = null; public identitySecret: string | null = null; constructor(account: string) { account = account.trim(); if (account.length === 0) throw new Error('Invalid account'); const parts = account.split(':').map((part) => part.trim()); if (parts.length < 2) throw new Error('Invalid account'); const [username, password, sharedSecret, identitySecret] = parts; this.username = username; this.password = password; if (sharedSecret) this.sharedSecret = sharedSecret; if (identitySecret) this.identitySecret = identitySecret; } } @Injectable() export class AccountsImportService { private readonly logger = new Logger(AccountsImportService.name); private readonly readFilesQueue = new pQueue({ concurrency: 100 }); public async loadAccounts(input: string[] | string) { if (!input) return []; if (!Array.isArray(input)) input = [input]; if (input.length === 0) return []; let accounts: Account[] = []; const errors: string[] = []; const readResults = await Promise.all(input.map((input) => this.readAccountsFromInput(input))); for (const result of readResults) { accounts.push(...result.values); errors.push(...result.errors); } accounts = this.removeDuplicates(accounts); if (errors.length > 0 && accounts.length > 0) { this.logger.warn(`The following account sources are invalid:\n${errors.join('\n')}`); await delay(1000); const { confirm } = await inquirer.prompt({ type: 'confirm', name: 'confirm', message: 'Continue with the valid accounts?', default: false, }); if (!confirm) throw new Error('Aborted by user'); } return accounts; }
public assignSecretsToAccounts(accounts: Account[], secrets: Secrets[]) {
const secretsMap = new Map<string, Secrets>(); for (const secret of secrets) { secretsMap.set(secret.username, secret); // some existing steam-oriented apps are case-insensitive to usernames in secrets secretsMap.set(secret.username.toLowerCase(), secret); } for (const account of accounts) { let secret = secretsMap.get(account.username); if (!secret) secret = secretsMap.get(account.username.toLowerCase()); if (!secret) continue; account.sharedSecret = secret.sharedSecret; account.identitySecret = secret.identitySecret; } } private removeDuplicates(accounts: Account[]) { const map = new Map<string, Account>(); for (const account of accounts) map.set(account.username, account); return [...map.values()]; } private async readAccountsFromInput(input: string) { const inputType = await this.inferInputType(input); if (inputType === 'file') return this.readAccountsFromFile(input); if (inputType === 'string') return this.readAccountFromString(input); if (inputType === 'directory') return { values: [], errors: [input] }; } private async readAccountsFromFile(filePath: string) { const result: { values: Account[]; errors: string[] } = { values: [], errors: [] }; try { let content = await this.readFilesQueue.add(() => fs.readFile(filePath, 'utf-8')); content = content.trim(); if (content.length === 0) throw new Error('Empty file'); // session file if (filePath.endsWith('.steamsession')) { const readResults = this.readAccountFromSessionFile(content); result.values.push(...readResults.values); if (readResults.errors.length > 0) result.errors.push(filePath); return result; } // asf json if (filePath.endsWith('.json') && content.includes('"SteamLogin"')) { const readResults = this.readAccountFromAsfJson(content); result.values.push(...readResults.values); if (readResults.errors.length > 0) result.errors.push(filePath); return result; } // plain text if (content.includes(':')) { const lines = content .split(/\s+|\r?\n/) .map((l) => l.trim()) .filter((l) => l.length > 0); if (lines.length === 0) throw new Error('Empty file'); for (const line of lines) { const readResults = this.readAccountFromString(line); result.values.push(...readResults.values); if (readResults.errors.length > 0) result.errors.push(line); } return result; } throw new Error('Unsupported file format'); } catch (error) { result.errors.push(filePath); } return result; } private readAccountFromString(str: string) { const result: { values: Account[]; errors: string[] } = { values: [], errors: [] }; try { const account = new Account(str); result.values.push(account); } catch (error) { result.errors.push(str); } return result; } private readAccountFromAsfJson(fileContent: string) { const result: { values: Account[]; errors: string[] } = { values: [], errors: [] }; try { const { SteamLogin: username, SteamPassword: password } = JSON.parse(fileContent); if (!username) throw new Error('Invalid username'); if (!password) throw new Error('Invalid password'); const account = new Account(`${username}:${password}`); result.values.push(account); } catch (error) { result.errors.push(fileContent); } return result; } private readAccountFromSessionFile(fileContent: string) { const result: { values: Account[]; errors: string[] } = { values: [], errors: [] }; try { const { Username, Password, SharedSecret, IdentitySecret } = JSON.parse(fileContent); if (!Username) throw new Error('Invalid username'); if (!Password) throw new Error('Invalid password'); const account = new Account(`${Username}:${Password}:${SharedSecret || ''}:${IdentitySecret || ''}`); 
result.values.push(account); } catch (error) { result.errors.push(fileContent); } return result; } private async inferInputType(input: string) { if (typeof input !== 'string') throw new Error(`Invalid input type: '${typeof input}'`); try { const stats = await fs.stat(input); if (stats.isFile()) return 'file'; if (stats.isDirectory()) return 'directory'; } catch (error) { return 'string'; } } }
src/modules/accounts-import/accounts-import.service.ts
Sadzurami-steam-sessions-creator-97b7294
[ { "filename": "src/commands/create/create-sessions.command.ts", "retrieved_chunk": " description: 'Overwrite existing sessions.',\n defaultValue: false,\n })\n private parseOverwriteOption(val: string) {\n return new CliUtilityService().parseBoolean(val);\n }\n @Help('afterAll')\n private displayExamples() {\n return `\nExamples:", "score": 0.8285399675369263 }, { "filename": "src/modules/secrets-import/secrets-import.service.ts", "retrieved_chunk": " default: false,\n });\n if (!confirm) throw new Error('Aborted by user');\n }\n return secrets;\n }\n private removeDuplicates(secrets: Secrets[]) {\n const map = new Map<string, Secrets>();\n for (const secret of secrets) map.set(secret.username, secret);\n return [...map.values()];", "score": 0.8220633268356323 }, { "filename": "src/commands/create/create-sessions.command.ts", "retrieved_chunk": " private readonly secretsImportService: SecretsImportService,\n private readonly proxiesImportService: ProxiesImportService,\n private readonly proxiesService: ProxiesService,\n ) {\n super();\n }\n public async run(args: string[], options: CreateCommandOptions) {\n try {\n const accountsOptionInput = await this.normalizeInput(options.accounts);\n let accounts = await this.accountsImportService.loadAccounts(accountsOptionInput);", "score": 0.8191404342651367 }, { "filename": "src/commands/create/create-sessions.service.ts", "retrieved_chunk": " constructor(private readonly steamTokensService: SteamTokensService, private readonly configService: ConfigService) {}\n public onModuleInit() {\n this.schemaVersion = this.configService.getOrThrow<number>('session.schemaVersion');\n }\n public async createSession(account: Account) {\n try {\n // we need to wait at least 30 seconds between each refresh token creation\n // because steam has a limit of logins for one account once per 30 seconds\n // probably it's fair only for accounts with 2FA enabled\n const delayMs = 1000 * 31;", "score": 0.8140490055084229 }, { "filename": "src/commands/validate/validate-sessions.command.ts", "retrieved_chunk": " description: `Specify one or more sessions.\nSession can be specified as:\n- A file path to load session from.\n- A glob pattern to load sessions from multiple files.`,\n })\n private parseSessionsOption(val: string, accumulator: string[] = []) {\n accumulator.push(val);\n return accumulator;\n }\n @Help('afterAll')", "score": 0.8089045286178589 } ]
typescript
public assignSecretsToAccounts(accounts: Account[], secrets: Secrets[]) {
import { App, Notice, PluginSettingTab, Setting, debounce } from "obsidian"; import FinDocPlugin from "main"; import { idToText } from "utils"; import loadIcons from "loadIcons"; import { types } from "./constants"; export default class SettingsTab extends PluginSettingTab { plugin: FinDocPlugin; constructor(app: App, plugin: FinDocPlugin) { super(app, plugin); this.plugin = plugin; loadIcons(); } createNewColorBtn(): HTMLElement { const btn = this.containerEl.createEl("button"); btn.classList.add("findoc-btn-margin-bottom"); btn.id = "newColor"; btn.innerText = "Add New Color"; btn.onClickEvent(() => { this.plugin.settings.colors.unshift("#ffffff"); console.debug(this.plugin.settings.colors); this.display(); }); return btn; } display(): void { const { containerEl } = this; containerEl.empty(); containerEl.createEl("h2", { text: "Settings" }); new Setting(containerEl).setName("Support").addButton((button) => { button.buttonEl.innerHTML = "<a style='margin: 0 auto;' href='https://www.buymeacoffee.com/studiowebux'><img width='109px' alt='Buy me a Coffee' src='https://cdn.buymeacoffee.com/buttons/v2/default-yellow.png'/></a>"; button.buttonEl.classList.add("findoc-support-btn"); }); new Setting(containerEl) .setName("CSV Save debounce") .setDesc( "Timeout to trigger the CSV saving process (Value must be greater than 500 and less than 5000)" ) .addText((text) => { text.setValue(this.plugin.settings.debounce.toString()); text.onChange( debounce(async (value: string) => { if ( isNaN(parseInt(value)) || parseInt(value) < 500 || parseInt(value) > 5000 ) { new Notice("Invalid debounce value !"); return; } this.plugin.settings.debounce = value; await this.plugin.saveSettings(); new Notice("Debounce Updated !"); }, 500) ); }); new Setting(containerEl).setName("CSV Separator").addText((text) => { text.setValue(this.plugin.settings.csvSeparator.toString()); text.onChange( debounce(async (value: string) => { this.plugin.settings.csvSeparator = value; await this.plugin.saveSettings(); new Notice("CSV Separator Updated !"); }, 500) ); }); new Setting(containerEl) .setName("Models") .setDesc("Models available (It must be a JSON.stringify version)"); const div = containerEl.createDiv(); div.classList.add("findoc-models-container"); Object.entries(this.plugin.settings.models).forEach(([key, model]) => { const name = idToText(key); const modelSection = div.createDiv(); const el = modelSection.createEl("h2"); el.innerText = name; modelSection.classList.add("findoc-model-section"); new Setting(modelSection) .setName(`Data Source for ${name}`) .addDropdown((dropdown) => { dropdown.addOption( "splitDailyDates", "Split By Daily Dates" ); dropdown.addOption( "splitByYearMonth", "Split By Year & Month" ); dropdown.addOption("splitByYear", "Split By Year"); dropdown.setValue( this.plugin.settings.models[key].dataSource ); dropdown.onChange(async (value) => { this.plugin.settings.models[key].dataSource = value; await this.plugin.saveSettings(); new Notice("Data Source Updated !"); }); }); new Setting(modelSection) .setName(`Output Function for ${name}`) .addDropdown((dropdown) => { dropdown.addOption( "generateSumDataSet", "Generate Sum Dataset" ); dropdown.addOption( "generateDailyDataSet", "Generate Daily Dataset" ); dropdown.addOption( "generateSumDataSetPerTypes", "Generate Sum Dataset Per Types" ); dropdown.setValue(this.plugin.settings.models[key].output); dropdown.onChange(async (value) => { this.plugin.settings.models[key].output = value; await this.plugin.saveSettings(); new Notice("Output Updated !"); }); 
}); new Setting(modelSection) .setName(`Begin at Zero for ${name}`) .addToggle((toggle) => { toggle.setValue( this.plugin.settings.models[key].beginAtZero ); toggle.onChange(async (value) => { this.plugin.settings.models[key].beginAtZero = value; await this.plugin.saveSettings(); new Notice("Begin at Zero Updated !"); }); }); const h2 = modelSection.createEl("h2"); h2.innerText = `Types for ${name}`; const wrapper = modelSection.createDiv(); wrapper.classList.add("findoc-model-section-wrapper"); const select = wrapper.createEl("select"); select.id = key; select.multiple = true; select.classList.add("findoc-select"); select.setAttribute("value", model.types.join(",")); select.onchange = async () => { const selected = []; // @ts-ignore for (const option of document.getElementById(key).options) { if (option.selected) { selected.push(option.value); } } // select.value = selected.join(","); model.types = selected; await this.plugin.saveSettings(); new Notice("Types Updated !"); };
types.forEach((type: string) => {
const opt = select.createEl("option"); opt.id = type; opt.value = type; opt.innerText = type; opt.selected = model.types.includes(type); }); modelSection.createEl("hr"); }); new Setting(containerEl).setName("Colors"); const colorSection = containerEl.createDiv(); colorSection.appendChild(this.createNewColorBtn()); colorSection.classList.add("findoc-color-section") this.plugin.settings.colors.forEach((color, key) => { new Setting(colorSection) .setName(`Color #${key}`) .addColorPicker(async (colorPicker) => { colorPicker.setValue(color); colorPicker.onChange( debounce(async (value: string) => { this.plugin.settings.colors[key] = value; await this.plugin.saveSettings(); new Notice("Color Updated !"); }, 500) ); }) .addExtraButton((btn) => { btn.setTooltip("Delete Color"); btn.setIcon("trash"); btn.onClick(async () => { this.plugin.settings.colors.splice(key, 1); await this.plugin.saveSettings(); new Notice("Color Deleted !"); this.display(); }); }); }); } }
src/SettingsTab.ts
yet-another-tool-obsidian-findoc-6c84413
[ { "filename": "src/view.ts", "retrieved_chunk": "\t\t\t\t\t})\n\t\t\t\t\t.join(this.plugin.settings.csvSeparator)\n\t\t\t)\n\t\t\t// Clear empty lines\n\t\t\t.filter((r) => r.length !== 0);\n\t\tthis.tableData = [this.tableHeader, ...this.tableData];\n\t\tthis.requestSave();\n\t\t// TODO: Replace this timeout with the proper and recommended way.\n\t\tnew Notice(\"Saving in progress...\", 2005);\n\t\tdebounce(() => {", "score": 0.7523818612098694 }, { "filename": "src/view.ts", "retrieved_chunk": "\t\t\topt.innerText = option;\n\t\t\tif (option === selected) opt.selected = true;\n\t\t\tdropdown.appendChild(opt);\n\t\t});\n\t\treturn dropdown;\n\t}\n\tcreateTable(data: string[]) {\n\t\tthis.div = this.contentEl.createDiv();\n\t\tconst table = this.contentEl.createEl(\"table\");\n\t\t//", "score": 0.7399393916130066 }, { "filename": "src/view.ts", "retrieved_chunk": "\t\tconst dropdown = this.contentEl.createEl(\"select\");\n\t\tdropdown.id = id;\n\t\tdropdown.setAttribute(\"value\", selected);\n\t\tdropdown.onchange = () => {\n\t\t\tdropdown.setAttribute(\"value\", dropdown.value);\n\t\t};\n\t\ttypes.forEach((option: string) => {\n\t\t\tconst opt = this.contentEl.createEl(\"option\");\n\t\t\topt.value = option;\n\t\t\topt.id = id + option.replace(\" \", \"_\");", "score": 0.7273640632629395 }, { "filename": "src/view.ts", "retrieved_chunk": "\t\tthis.refresh();\n\t}\n\trefresh() {\n\t\tthis.div.oninput = debounce(() => {\n\t\t\tthis.saveData();\n\t\t}, parseInt(this.plugin.settings.debounce) || 1000);\n\t}\n\tsaveData() {\n\t\tconst rows = this.div.innerHTML.split(new RegExp(/<tr.*?>/));\n\t\tthis.tableData = rows", "score": 0.7267970442771912 }, { "filename": "src/methods.ts", "retrieved_chunk": "\t\t\t\t\t\t\t\t\ttypes[current.id] += current.value;\n\t\t\t\t\t\t\t\t\treturn types;\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{}\n\t\t\t\t\t\t\t);\n\t\t\t\t\t})\n\t\t\t\t\t.reduce((typeSum, current) => {\n\t\t\t\t\t\tif (current[type]) typeSum.push(current[type]);\n\t\t\t\t\t\telse typeSum.push(0);\n\t\t\t\t\t\treturn typeSum;", "score": 0.7244796752929688 } ]
typescript
types.forEach((type: string) => {
import glob from 'fast-glob'; import { CliUtilityService, Command, CommandRunner, Help, Option } from 'nest-commander'; import pQueue from 'p-queue'; import path from 'path'; import { setTimeout as delay } from 'timers/promises'; import { Logger } from '@nestjs/common'; import { AccountsImportService } from '../../modules/accounts-import/accounts-import.service'; import { ExportSessionsService } from '../../modules/export-sessions/export-sessions.service'; import { ProxiesImportService } from '../../modules/proxies-import/proxies-import.service'; import { ProxiesService } from '../../modules/proxies/proxies.service'; import { SecretsImportService } from '../../modules/secrets-import/secrets-import.service'; import { CreateSessionsService } from './create-sessions.service'; interface CreateCommandOptions { accounts: string | string[]; secrets: string | string[]; proxies: string | string[]; concurrency: number; output: string; overwrite: boolean; } @Command({ name: 'create', description: 'Creates new sessions', }) export class CreateSessionsCommand extends CommandRunner { private readonly logger = new Logger(CreateSessionsCommand.name); constructor( private readonly createSessionsService: CreateSessionsService, private readonly exportSessionsService: ExportSessionsService, private readonly accountsImportService: AccountsImportService, private readonly secretsImportService: SecretsImportService, private readonly proxiesImportService: ProxiesImportService, private readonly proxiesService: ProxiesService, ) { super(); } public async run(args: string[], options: CreateCommandOptions) { try { const accountsOptionInput = await this.normalizeInput(options.accounts); let accounts = await this.accountsImportService.loadAccounts(accountsOptionInput); if (accounts.length === 0) throw new Error('No accounts found'); this.logger.log(`Accounts: ${accounts.length}`); const secretsOptionInput = await this.normalizeInput(options.secrets); const secrets = await this.secretsImportService.loadSecrets(secretsOptionInput); this.logger.log(`Secrets: ${secrets.length}`); const outputOptionInput = options.output; if (!outputOptionInput) throw new Error('Output path is required'); const output = path.resolve(outputOptionInput);
await this.exportSessionsService.setOutputPath(output);
this.logger.log(`Output: ${output}`); const overwriteExistingSessions = options.overwrite; if (!overwriteExistingSessions) { const sessionsPaths = await this.normalizeInput(`${output}/*`); const existingSessions = await this.accountsImportService.loadAccounts(sessionsPaths); this.logger.log(`Ignoring existing sessions: ${existingSessions.length}`); accounts = accounts.filter((account) => !existingSessions.some((a) => a.username === account.username)); } if (accounts.length === 0) { this.logger.log('No accounts to create'); return; } const proxiesOptionInput = await this.normalizeInput(options.proxies); const proxies = await this.proxiesImportService.loadProxies(proxiesOptionInput); this.proxiesService.setProxies(proxies); this.logger.log(`Proxies: ${proxies.length}`); const concurrencyOptionInput = options.concurrency; const concurrency = proxies.length > 0 ? concurrencyOptionInput || Math.min(proxies.length * 3, 100) : 1; this.logger.log(`Concurrency: ${concurrency}`); this.logger.log(`Starting to create sessions for ${accounts.length} accounts`); this.accountsImportService.assignSecretsToAccounts(accounts, secrets); let success = 0; let fails = 0; let left = accounts.length; const queue = new pQueue({ concurrency, interval: 10, intervalCap: 1 }); for (const account of accounts) { queue.add(async () => { try { const session = await this.createSessionsService.createSession(account); await this.exportSessionsService.exportSession(session); success++; this.logger.log(`Success: ${account.username}, left: ${--left}`); } catch (error) { fails++; this.logger.warn(`Fail: ${account.username}, left: ${--left}`); } }); } await queue.onIdle(); this.logger.log(`Session creation complete`); this.logger.log(`Success: ${success}`); this.logger.log(`Fails: ${fails}`); await delay(1000); } catch (error) { this.logger.error(error.message); } } private async normalizeInput(input: string | string[]) { if (!input) return []; if (!Array.isArray(input)) input = [input]; const filteredInput = input.filter((el) => typeof el === 'string' && el.trim() !== ''); if (filteredInput.length === 0) return []; const nestedData = await Promise.all( filteredInput.map(async (el) => { el = el.trim(); // Possible glob pattern const files = await glob(el); if (files.length > 0) return files; // Possible string return el.split(/\s+|\r?\n/).map((line) => line.trim()); }), ); return nestedData.flat(); } @Option({ required: true, flags: '-a, --accounts <accounts...>', description: `Specify one or more accounts. Account can be specified as: - A simple string. - A file path to load accounts from (one account per line). - A glob pattern to load accounts from multiple files. Supported formats: - username:password - username:password:sharedSecret - username:password:sharedSecret:identitySecret - ASF json`, }) private parseAccountsOption(val: string, accumulator: string[] = []) { accumulator.push(val); return accumulator; } @Option({ flags: '-s, --secrets <secrets...>', description: `Specify one or more secrets. Secret can be specified as: - A file path to load secrets from file. - A glob pattern to load secrets from multiple files. Supported formats: - maFile - ASF db`, }) private parseSecretsOption(val: string, accumulator: string[] = []) { accumulator.push(val); return accumulator; } @Option({ flags: '-p, --proxies <proxies...>', description: `Specify one or more proxies. Proxy can be specified as: - A string in the format <protocol>://<username>:<password>@<host>:<port>. - A file path to load proxies from a text file. 
Supported protocols: - http - https`, }) private parseProxiesOption(val: string, accumulator: string[] = []) { accumulator.push(val); return accumulator; } @Option({ flags: '-c, --concurrency <concurrency>', description: `Specify the number of concurrent runs. Default: 1, or the number of proxies.`, }) private parseConcurrencyOption(val: string) { const parsed = parseInt(val, 10); if (Number.isNaN(parsed)) throw new Error('Concurrency must be a number'); if (parsed < 1) throw new Error('Concurrency must be greater than 0'); return parsed; } @Option({ flags: '-o, --output <output>', description: 'Specify the output directory.', defaultValue: './sessions', }) private parseOutputOption(val: string) { return val; } @Option({ flags: '--overwrite (-w)', description: 'Overwrite existing sessions.', defaultValue: false, }) private parseOverwriteOption(val: string) { return new CliUtilityService().parseBoolean(val); } @Help('afterAll') private displayExamples() { return ` Examples: create -a accounts.txt -s ./secrets -p proxies.txt create -a username:password -p proxies.txt`; } }
src/commands/create/create-sessions.command.ts
Sadzurami-steam-sessions-creator-97b7294
[ { "filename": "src/commands/validate/validate-sessions.command.ts", "retrieved_chunk": " public async run(args: string[], options: ValidateCommandOptions) {\n try {\n const sessionsOptionInput = await this.normalizeInput(options.sessions);\n const sessions = await this.sessionsImportService.loadSessions(sessionsOptionInput);\n if (sessions.length === 0) throw new Error('No sessions found');\n this.logger.log(`Sessions: ${sessions.length}`);\n await this.validateSessionsService.validateSessions(sessions);\n } catch (error) {\n this.logger.error(error.message);\n }", "score": 0.8508365154266357 }, { "filename": "src/modules/accounts-import/accounts-import.service.ts", "retrieved_chunk": " for (const account of accounts) map.set(account.username, account);\n return [...map.values()];\n }\n private async readAccountsFromInput(input: string) {\n const inputType = await this.inferInputType(input);\n if (inputType === 'file') return this.readAccountsFromFile(input);\n if (inputType === 'string') return this.readAccountFromString(input);\n if (inputType === 'directory') return { values: [], errors: [input] };\n }\n private async readAccountsFromFile(filePath: string) {", "score": 0.7712221145629883 }, { "filename": "src/modules/secrets-import/secrets-import.service.ts", "retrieved_chunk": " public async loadSecrets(input: string[] | string) {\n if (!input) return [];\n if (!Array.isArray(input)) input = [input];\n if (input.length === 0) return [];\n let secrets: Secrets[] = [];\n const errors: string[] = [];\n const readResults = await Promise.all(input.map((input) => this.readSecretsFromInput(input)));\n for (const result of readResults) {\n secrets.push(...result.values);\n errors.push(...result.errors);", "score": 0.7643260955810547 }, { "filename": "src/modules/accounts-import/accounts-import.service.ts", "retrieved_chunk": " if (!Array.isArray(input)) input = [input];\n if (input.length === 0) return [];\n let accounts: Account[] = [];\n const errors: string[] = [];\n const readResults = await Promise.all(input.map((input) => this.readAccountsFromInput(input)));\n for (const result of readResults) {\n accounts.push(...result.values);\n errors.push(...result.errors);\n }\n accounts = this.removeDuplicates(accounts);", "score": 0.7635645270347595 }, { "filename": "src/modules/export-sessions/export-sessions.service.ts", "retrieved_chunk": " const serializedSession = this.serializeSession(session);\n const sessionPath = path.resolve(this.outputPath, `${session.username}.${this.fileExtension}`);\n try {\n await fs.writeFile(sessionPath, serializedSession);\n } catch (error) {\n throw new Error('Failed to write session to file', { cause: error });\n }\n }\n private serializeSession(session: Session) {\n const serializedObject = Object.fromEntries(", "score": 0.7579970359802246 } ]
typescript
await this.exportSessionsService.setOutputPath(output);
import glob from 'fast-glob'; import { CliUtilityService, Command, CommandRunner, Help, Option } from 'nest-commander'; import pQueue from 'p-queue'; import path from 'path'; import { setTimeout as delay } from 'timers/promises'; import { Logger } from '@nestjs/common'; import { AccountsImportService } from '../../modules/accounts-import/accounts-import.service'; import { ExportSessionsService } from '../../modules/export-sessions/export-sessions.service'; import { ProxiesImportService } from '../../modules/proxies-import/proxies-import.service'; import { ProxiesService } from '../../modules/proxies/proxies.service'; import { SecretsImportService } from '../../modules/secrets-import/secrets-import.service'; import { CreateSessionsService } from './create-sessions.service'; interface CreateCommandOptions { accounts: string | string[]; secrets: string | string[]; proxies: string | string[]; concurrency: number; output: string; overwrite: boolean; } @Command({ name: 'create', description: 'Creates new sessions', }) export class CreateSessionsCommand extends CommandRunner { private readonly logger = new Logger(CreateSessionsCommand.name); constructor( private readonly createSessionsService: CreateSessionsService, private readonly exportSessionsService: ExportSessionsService, private readonly accountsImportService: AccountsImportService, private readonly secretsImportService: SecretsImportService, private readonly proxiesImportService: ProxiesImportService, private readonly proxiesService: ProxiesService, ) { super(); } public async run(args: string[], options: CreateCommandOptions) { try { const accountsOptionInput = await this.normalizeInput(options.accounts); let accounts = await this.accountsImportService.loadAccounts(accountsOptionInput); if (accounts.length === 0) throw new Error('No accounts found'); this.logger.log(`Accounts: ${accounts.length}`); const secretsOptionInput = await this.normalizeInput(options.secrets); const secrets = await this.secretsImportService.loadSecrets(secretsOptionInput); this.logger.log(`Secrets: ${secrets.length}`); const outputOptionInput = options.output; if (!outputOptionInput) throw new Error('Output path is required'); const output = path.resolve(outputOptionInput); await this.exportSessionsService.setOutputPath(output); this.logger.log(`Output: ${output}`); const overwriteExistingSessions = options.overwrite; if (!overwriteExistingSessions) { const sessionsPaths = await this.normalizeInput(`${output}/*`); const existingSessions = await this.accountsImportService.loadAccounts(sessionsPaths); this.logger.log(`Ignoring existing sessions: ${existingSessions.length}`); accounts = accounts.filter((account) => !existingSessions.some((a) => a.username === account.username)); } if (accounts.length === 0) { this.logger.log('No accounts to create'); return; } const proxiesOptionInput = await this.normalizeInput(options.proxies); const proxies = await this.proxiesImportService.loadProxies(proxiesOptionInput);
this.proxiesService.setProxies(proxies);
this.logger.log(`Proxies: ${proxies.length}`); const concurrencyOptionInput = options.concurrency; const concurrency = proxies.length > 0 ? concurrencyOptionInput || Math.min(proxies.length * 3, 100) : 1; this.logger.log(`Concurrency: ${concurrency}`); this.logger.log(`Starting to create sessions for ${accounts.length} accounts`); this.accountsImportService.assignSecretsToAccounts(accounts, secrets); let success = 0; let fails = 0; let left = accounts.length; const queue = new pQueue({ concurrency, interval: 10, intervalCap: 1 }); for (const account of accounts) { queue.add(async () => { try { const session = await this.createSessionsService.createSession(account); await this.exportSessionsService.exportSession(session); success++; this.logger.log(`Success: ${account.username}, left: ${--left}`); } catch (error) { fails++; this.logger.warn(`Fail: ${account.username}, left: ${--left}`); } }); } await queue.onIdle(); this.logger.log(`Session creation complete`); this.logger.log(`Success: ${success}`); this.logger.log(`Fails: ${fails}`); await delay(1000); } catch (error) { this.logger.error(error.message); } } private async normalizeInput(input: string | string[]) { if (!input) return []; if (!Array.isArray(input)) input = [input]; const filteredInput = input.filter((el) => typeof el === 'string' && el.trim() !== ''); if (filteredInput.length === 0) return []; const nestedData = await Promise.all( filteredInput.map(async (el) => { el = el.trim(); // Possible glob pattern const files = await glob(el); if (files.length > 0) return files; // Possible string return el.split(/\s+|\r?\n/).map((line) => line.trim()); }), ); return nestedData.flat(); } @Option({ required: true, flags: '-a, --accounts <accounts...>', description: `Specify one or more accounts. Account can be specified as: - A simple string. - A file path to load accounts from (one account per line). - A glob pattern to load accounts from multiple files. Supported formats: - username:password - username:password:sharedSecret - username:password:sharedSecret:identitySecret - ASF json`, }) private parseAccountsOption(val: string, accumulator: string[] = []) { accumulator.push(val); return accumulator; } @Option({ flags: '-s, --secrets <secrets...>', description: `Specify one or more secrets. Secret can be specified as: - A file path to load secrets from file. - A glob pattern to load secrets from multiple files. Supported formats: - maFile - ASF db`, }) private parseSecretsOption(val: string, accumulator: string[] = []) { accumulator.push(val); return accumulator; } @Option({ flags: '-p, --proxies <proxies...>', description: `Specify one or more proxies. Proxy can be specified as: - A string in the format <protocol>://<username>:<password>@<host>:<port>. - A file path to load proxies from a text file. Supported protocols: - http - https`, }) private parseProxiesOption(val: string, accumulator: string[] = []) { accumulator.push(val); return accumulator; } @Option({ flags: '-c, --concurrency <concurrency>', description: `Specify the number of concurrent runs. 
Default: 1, or the number of proxies.`, }) private parseConcurrencyOption(val: string) { const parsed = parseInt(val, 10); if (Number.isNaN(parsed)) throw new Error('Concurrency must be a number'); if (parsed < 1) throw new Error('Concurrency must be greater than 0'); return parsed; } @Option({ flags: '-o, --output <output>', description: 'Specify the output directory.', defaultValue: './sessions', }) private parseOutputOption(val: string) { return val; } @Option({ flags: '--overwrite (-w)', description: 'Overwrite existing sessions.', defaultValue: false, }) private parseOverwriteOption(val: string) { return new CliUtilityService().parseBoolean(val); } @Help('afterAll') private displayExamples() { return ` Examples: create -a accounts.txt -s ./secrets -p proxies.txt create -a username:password -p proxies.txt`; } }
src/commands/create/create-sessions.command.ts
Sadzurami-steam-sessions-creator-97b7294
[ { "filename": "src/commands/validate/validate-sessions.command.ts", "retrieved_chunk": " public async run(args: string[], options: ValidateCommandOptions) {\n try {\n const sessionsOptionInput = await this.normalizeInput(options.sessions);\n const sessions = await this.sessionsImportService.loadSessions(sessionsOptionInput);\n if (sessions.length === 0) throw new Error('No sessions found');\n this.logger.log(`Sessions: ${sessions.length}`);\n await this.validateSessionsService.validateSessions(sessions);\n } catch (error) {\n this.logger.error(error.message);\n }", "score": 0.8471497893333435 }, { "filename": "src/modules/accounts-import/accounts-import.service.ts", "retrieved_chunk": " if (!Array.isArray(input)) input = [input];\n if (input.length === 0) return [];\n let accounts: Account[] = [];\n const errors: string[] = [];\n const readResults = await Promise.all(input.map((input) => this.readAccountsFromInput(input)));\n for (const result of readResults) {\n accounts.push(...result.values);\n errors.push(...result.errors);\n }\n accounts = this.removeDuplicates(accounts);", "score": 0.7866348028182983 }, { "filename": "src/modules/steam-tokens/steam-tokens.service.ts", "retrieved_chunk": " this.throttleConnection(connectionId, this.connectionThrottlingTimeout);\n const loginSessionOptions = {};\n if (proxy) loginSessionOptions[proxy.protocol.includes('socks') ? 'socksProxy' : 'httpProxy'] = proxy.toString();\n const loginSession = new LoginSession(loginSessionPlatform, loginSessionOptions);\n loginSession.on('error', () => {}); // fallback errors handling\n try {\n const credentials = { accountName: account.username, password: account.password } as any;\n if (account.sharedSecret) credentials.steamGuardCode = SteamTotp.getAuthCode(account.sharedSecret);\n loginSession\n .startWithCredentials(credentials)", "score": 0.7805748581886292 }, { "filename": "src/modules/proxies-import/proxies-import.service.ts", "retrieved_chunk": " if (!input) return [];\n if (!Array.isArray(input)) input = [input];\n if (input.length === 0) return [];\n let proxies: Proxy[] = [];\n const errors: string[] = [];\n const readResults = await Promise.all(input.map((input) => this.readProxyFromInput(input)));\n for (const result of readResults) {\n proxies.push(...result.values);\n errors.push(...result.errors);\n }", "score": 0.7766212224960327 }, { "filename": "src/modules/proxies/proxies.service.ts", "retrieved_chunk": " public setProxies(proxies: Proxy[]) {\n if (proxies.length === 0) return;\n for (const proxy of proxies) {\n this.proxies.set(proxy.toString(), proxy);\n }\n }\n public async getProxy(): Promise<Proxy | null> {\n if (this.proxies.size === 0) return null;\n const proxy = await this.proxiesUsageQueue.add(() => this.fetchProxy());\n this.throttleProxy(proxy);", "score": 0.7738924026489258 } ]
typescript
this.proxiesService.setProxies(proxies);
import glob from 'fast-glob'; import { CliUtilityService, Command, CommandRunner, Help, Option } from 'nest-commander'; import pQueue from 'p-queue'; import path from 'path'; import { setTimeout as delay } from 'timers/promises'; import { Logger } from '@nestjs/common'; import { AccountsImportService } from '../../modules/accounts-import/accounts-import.service'; import { ExportSessionsService } from '../../modules/export-sessions/export-sessions.service'; import { ProxiesImportService } from '../../modules/proxies-import/proxies-import.service'; import { ProxiesService } from '../../modules/proxies/proxies.service'; import { SecretsImportService } from '../../modules/secrets-import/secrets-import.service'; import { CreateSessionsService } from './create-sessions.service'; interface CreateCommandOptions { accounts: string | string[]; secrets: string | string[]; proxies: string | string[]; concurrency: number; output: string; overwrite: boolean; } @Command({ name: 'create', description: 'Creates new sessions', }) export class CreateSessionsCommand extends CommandRunner { private readonly logger = new Logger(CreateSessionsCommand.name); constructor( private readonly createSessionsService: CreateSessionsService, private readonly exportSessionsService: ExportSessionsService, private readonly accountsImportService: AccountsImportService, private readonly secretsImportService: SecretsImportService, private readonly proxiesImportService: ProxiesImportService, private readonly proxiesService: ProxiesService, ) { super(); } public async run(args: string[], options: CreateCommandOptions) { try { const accountsOptionInput = await this.normalizeInput(options.accounts); let accounts = await this.accountsImportService.loadAccounts(accountsOptionInput); if (accounts.length === 0) throw new Error('No accounts found'); this.logger.log(`Accounts: ${accounts.length}`); const secretsOptionInput = await this.normalizeInput(options.secrets); const secrets = await this.secretsImportService.loadSecrets(secretsOptionInput); this.logger.log(`Secrets: ${secrets.length}`); const outputOptionInput = options.output; if (!outputOptionInput) throw new Error('Output path is required'); const output = path.resolve(outputOptionInput); await this.exportSessionsService.setOutputPath(output); this.logger.log(`Output: ${output}`); const overwriteExistingSessions = options.overwrite; if (!overwriteExistingSessions) { const sessionsPaths = await this.normalizeInput(`${output}/*`); const existingSessions = await this.accountsImportService.loadAccounts(sessionsPaths); this.logger.log(`Ignoring existing sessions: ${existingSessions.length}`); accounts = accounts.filter((account) => !existingSessions.some((a) => a.username === account.username)); } if (accounts.length === 0) { this.logger.log('No accounts to create'); return; } const proxiesOptionInput = await this.normalizeInput(options.proxies); const proxies = await this.proxiesImportService.loadProxies(proxiesOptionInput); this.proxiesService.setProxies(proxies); this.logger.log(`Proxies: ${proxies.length}`); const concurrencyOptionInput = options.concurrency; const concurrency = proxies.length > 0 ? concurrencyOptionInput || Math.min(proxies.length * 3, 100) : 1; this.logger.log(`Concurrency: ${concurrency}`); this.logger.log(`Starting to create sessions for ${accounts.length} accounts`);
this.accountsImportService.assignSecretsToAccounts(accounts, secrets);
let success = 0; let fails = 0; let left = accounts.length; const queue = new pQueue({ concurrency, interval: 10, intervalCap: 1 }); for (const account of accounts) { queue.add(async () => { try { const session = await this.createSessionsService.createSession(account); await this.exportSessionsService.exportSession(session); success++; this.logger.log(`Success: ${account.username}, left: ${--left}`); } catch (error) { fails++; this.logger.warn(`Fail: ${account.username}, left: ${--left}`); } }); } await queue.onIdle(); this.logger.log(`Session creation complete`); this.logger.log(`Success: ${success}`); this.logger.log(`Fails: ${fails}`); await delay(1000); } catch (error) { this.logger.error(error.message); } } private async normalizeInput(input: string | string[]) { if (!input) return []; if (!Array.isArray(input)) input = [input]; const filteredInput = input.filter((el) => typeof el === 'string' && el.trim() !== ''); if (filteredInput.length === 0) return []; const nestedData = await Promise.all( filteredInput.map(async (el) => { el = el.trim(); // Possible glob pattern const files = await glob(el); if (files.length > 0) return files; // Possible string return el.split(/\s+|\r?\n/).map((line) => line.trim()); }), ); return nestedData.flat(); } @Option({ required: true, flags: '-a, --accounts <accounts...>', description: `Specify one or more accounts. Account can be specified as: - A simple string. - A file path to load accounts from (one account per line). - A glob pattern to load accounts from multiple files. Supported formats: - username:password - username:password:sharedSecret - username:password:sharedSecret:identitySecret - ASF json`, }) private parseAccountsOption(val: string, accumulator: string[] = []) { accumulator.push(val); return accumulator; } @Option({ flags: '-s, --secrets <secrets...>', description: `Specify one or more secrets. Secret can be specified as: - A file path to load secrets from file. - A glob pattern to load secrets from multiple files. Supported formats: - maFile - ASF db`, }) private parseSecretsOption(val: string, accumulator: string[] = []) { accumulator.push(val); return accumulator; } @Option({ flags: '-p, --proxies <proxies...>', description: `Specify one or more proxies. Proxy can be specified as: - A string in the format <protocol>://<username>:<password>@<host>:<port>. - A file path to load proxies from a text file. Supported protocols: - http - https`, }) private parseProxiesOption(val: string, accumulator: string[] = []) { accumulator.push(val); return accumulator; } @Option({ flags: '-c, --concurrency <concurrency>', description: `Specify the number of concurrent runs. Default: 1, or the number of proxies.`, }) private parseConcurrencyOption(val: string) { const parsed = parseInt(val, 10); if (Number.isNaN(parsed)) throw new Error('Concurrency must be a number'); if (parsed < 1) throw new Error('Concurrency must be greater than 0'); return parsed; } @Option({ flags: '-o, --output <output>', description: 'Specify the output directory.', defaultValue: './sessions', }) private parseOutputOption(val: string) { return val; } @Option({ flags: '--overwrite (-w)', description: 'Overwrite existing sessions.', defaultValue: false, }) private parseOverwriteOption(val: string) { return new CliUtilityService().parseBoolean(val); } @Help('afterAll') private displayExamples() { return ` Examples: create -a accounts.txt -s ./secrets -p proxies.txt create -a username:password -p proxies.txt`; } }
src/commands/create/create-sessions.command.ts
Sadzurami-steam-sessions-creator-97b7294
[ { "filename": "src/commands/validate/validate-sessions.command.ts", "retrieved_chunk": " public async run(args: string[], options: ValidateCommandOptions) {\n try {\n const sessionsOptionInput = await this.normalizeInput(options.sessions);\n const sessions = await this.sessionsImportService.loadSessions(sessionsOptionInput);\n if (sessions.length === 0) throw new Error('No sessions found');\n this.logger.log(`Sessions: ${sessions.length}`);\n await this.validateSessionsService.validateSessions(sessions);\n } catch (error) {\n this.logger.error(error.message);\n }", "score": 0.811025857925415 }, { "filename": "src/modules/proxies/proxies.service.ts", "retrieved_chunk": " public setProxies(proxies: Proxy[]) {\n if (proxies.length === 0) return;\n for (const proxy of proxies) {\n this.proxies.set(proxy.toString(), proxy);\n }\n }\n public async getProxy(): Promise<Proxy | null> {\n if (this.proxies.size === 0) return null;\n const proxy = await this.proxiesUsageQueue.add(() => this.fetchProxy());\n this.throttleProxy(proxy);", "score": 0.7564128637313843 }, { "filename": "src/modules/proxies-import/proxies-import.service.ts", "retrieved_chunk": " if (!input) return [];\n if (!Array.isArray(input)) input = [input];\n if (input.length === 0) return [];\n let proxies: Proxy[] = [];\n const errors: string[] = [];\n const readResults = await Promise.all(input.map((input) => this.readProxyFromInput(input)));\n for (const result of readResults) {\n proxies.push(...result.values);\n errors.push(...result.errors);\n }", "score": 0.744815468788147 }, { "filename": "src/modules/steam-tokens/steam-tokens.service.ts", "retrieved_chunk": " this.throttleConnection(connectionId, this.connectionThrottlingTimeout);\n const loginSessionOptions = {};\n if (proxy) loginSessionOptions[proxy.protocol.includes('socks') ? 'socksProxy' : 'httpProxy'] = proxy.toString();\n const loginSession = new LoginSession(loginSessionPlatform, loginSessionOptions);\n loginSession.on('error', () => {}); // fallback errors handling\n try {\n const credentials = { accountName: account.username, password: account.password } as any;\n if (account.sharedSecret) credentials.steamGuardCode = SteamTotp.getAuthCode(account.sharedSecret);\n loginSession\n .startWithCredentials(credentials)", "score": 0.7426370978355408 }, { "filename": "src/modules/proxies-import/proxies-import.service.ts", "retrieved_chunk": " proxies = this.removeDuplicates(proxies);\n if (errors.length > 0) {\n this.logger.warn(`The following proxy sources are invalid:\\n${errors.join('\\n')}`);\n await delay(1000);\n const { confirm } = await inquirer.prompt({\n type: 'confirm',\n name: 'confirm',\n message: proxies.length > 0 ? `Continue with ${proxies.length} valid proxies ?` : 'Continue without proxies ?',\n default: false,\n });", "score": 0.7356477975845337 } ]
typescript
this.accountsImportService.assignSecretsToAccounts(accounts, secrets);
import { beforeEach, describe } from '@jest/globals'; import { ParentNode } from './ParentNode'; import { ChildNode } from './ChildNode'; interface DummyParent extends ParentNode {} /** * */ @ParentNode class DummyParent { /** * * @param _options - dummy options */ // eslint-disable-next-line @typescript-eslint/no-empty-function,@typescript-eslint/no-unused-vars,no-unused-vars constructor(_options?: unknown) {} } interface DummyChild extends ChildNode {} /** * */ @ChildNode class DummyChild { /** * * @param _options - dummy options */ // eslint-disable-next-line @typescript-eslint/no-empty-function,@typescript-eslint/no-unused-vars,no-unused-vars constructor(_options?: unknown) {} } describe('ParentNode and ChildNode integration', () => { describe('child removal', () => { let parent: DummyParent; let child: DummyChild; beforeEach(() => { parent = new DummyParent(); child = new DummyChild({ parent }); }); it('should remove child from parent on child.remove() call', () => { child.remove(); expect(parent.children).not.toContain(child); }); it('should set child\'s parent to null on parent.removeChild() call', () => {
parent.removeChild(child);
expect(child.parent).toBeNull(); }); }); describe('child addition', () => { let parent: DummyParent; let child: DummyChild; beforeEach(() => { parent = new DummyParent(); child = new DummyChild(); }); it('should add child to parent on child.appendTo call', () => { child.appendTo(parent); expect(parent.children).toContain(child); }); it('should set child\'s parent on parent.append() call', () => { parent.append(child); expect(child.parent).toEqual(parent); }); it('should set child\'s parent on parent.insertAfter() call', () => { const anotherChild = new DummyChild(); parent.append(child); parent.insertAfter(child, anotherChild); expect(anotherChild.parent).toEqual(parent); }); }); describe('child transfer from parent to parent', () => { let parent: DummyParent; let anotherParent: DummyParent; let child: DummyChild; beforeEach(() => { parent = new DummyParent(); child = new DummyChild({ parent }); anotherParent = new DummyParent(); }); it('should remove child from the old parent on new parent.append() call', () => { anotherParent.append(child); expect(parent.children).not.toContain(child); }); it('should remove child from the old parent on new parent.insertAfter() call', () => { const anotherChild = new DummyChild({ parent: anotherParent }); anotherParent.insertAfter(anotherChild, child); expect(parent.children).not.toContain(child); }); }); });
src/entities/interfaces/integration.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/interfaces/ChildNode.spec.ts", "retrieved_chunk": " dummy = new Dummy({\n parent: parentMock,\n });\n });\n it('should call parent\\'s removeChild method', () => {\n dummy.remove();\n expect(parentMock.removeChild).toBeCalledWith(dummy);\n });\n it('should set node\\'s parent to null', () => {\n dummy.remove();", "score": 0.9344658255577087 }, { "filename": "src/entities/interfaces/ParentNode.spec.ts", "retrieved_chunk": " dummy = new Dummy({\n children: [ childMock ],\n });\n dummy.removeChild(childMock);\n expect(dummy.children).toHaveLength(0);\n });\n it('should call remove method of child', () => {\n const childMock = createChildMock();\n dummy = new Dummy({\n children: [ childMock ],", "score": 0.9000377655029297 }, { "filename": "src/entities/interfaces/ChildNode.spec.ts", "retrieved_chunk": " dummy = new Dummy();\n expect(dummy.parent).toBeNull();\n });\n it('should return parent passed via constructor', () => {\n dummy = new Dummy({ parent: parentMock });\n expect(dummy.parent).toEqual(parentMock);\n });\n });\n describe('.remove()', () => {\n beforeEach(() => {", "score": 0.8952438831329346 }, { "filename": "src/entities/interfaces/ChildNode.spec.ts", "retrieved_chunk": " });\n it('should set node\\'s parent on appendTo call', () => {\n dummy.appendTo(parentMock);\n expect(dummy.parent).toBe(parentMock);\n });\n it('should do nothing if parents are the same', () => {\n const dummyWithParent = new Dummy({\n parent: parentMock,\n });\n jest.resetAllMocks();", "score": 0.8743153810501099 }, { "filename": "src/entities/interfaces/ParentNode.spec.ts", "retrieved_chunk": " expect(dummy.children).toEqual([]);\n });\n it('should return children passed via constructor', () => {\n const childMock = createChildMock();\n dummy = new Dummy({\n children: [ childMock ],\n });\n expect(dummy.children).toEqual([ childMock ]);\n });\n });", "score": 0.8736287951469421 } ]
typescript
parent.removeChild(child);
import pRetry from 'p-retry'; import { setTimeout as delay } from 'timers/promises'; import { Injectable, OnModuleInit } from '@nestjs/common'; import { ConfigService } from '@nestjs/config'; import { Account } from '../../interfaces/account.interface'; import { Session as ISession } from '../../interfaces/session.interface'; import { SteamTokensService } from '../../modules/steam-tokens/steam-tokens.service'; @Injectable() export class CreateSessionsService implements OnModuleInit { private schemaVersion: number; constructor(private readonly steamTokensService: SteamTokensService, private readonly configService: ConfigService) {} public onModuleInit() { this.schemaVersion = this.configService.getOrThrow<number>('session.schemaVersion'); } public async createSession(account: Account) { try { // we need to wait at least 30 seconds between each refresh token creation // because steam has a limit of logins for one account once per 30 seconds // probably it's fair only for accounts with 2FA enabled const delayMs = 1000 * 31; const desktopRefreshToken = await this.createRefreshToken(account, 'desktop'); await delay(delayMs); const webRefreshToken = await this.createRefreshToken(account, 'web'); await delay(delayMs); const mobileRefreshToken = await this.createRefreshToken(account, 'mobile'); await delay(delayMs); const steamId = this.getSteamIdFromRefreshToken(webRefreshToken); const schemaVersion = this.schemaVersion; const session: ISession = { username: account.username, password: account.password, sharedSecret: account.sharedSecret || null, identitySecret: account.identitySecret || null, steamId, webRefreshToken, mobileRefreshToken, desktopRefreshToken, schemaVersion, }; return session; } catch (error) { throw new Error('Failed to create session', { cause: error }); } } private async createRefreshToken(account: Account, platform: 'web' | 'mobile' | 'desktop') { try { return await pRetry(() => this.steamTokensService.createRefreshToken(account, platform), { retries: 3, minTimeout: 31000, maxTimeout: 31000, }); } catch (error) { throw new Error('Failed to create refresh token', { cause: error }); } } private getSteamIdFromRefreshToken(token: string) { try { const { sub: steamId
} = this.steamTokensService.decodeRefreshToken(token);
if (!steamId) throw new Error('SteamId is missing from refresh token'); return steamId; } catch (error) { throw new Error('Failed to get steamId from refresh token', { cause: error }); } } }
src/commands/create/create-sessions.service.ts
Sadzurami-steam-sessions-creator-97b7294
[ { "filename": "src/modules/steam-tokens/steam-tokens.service.ts", "retrieved_chunk": " return JSON.parse(headerJson);\n } catch (error) {\n throw new Error('An error occurred while decoding refresh token', { cause: error });\n }\n }\n public validateRefreshToken(token: string) {\n try {\n const { iss, sub, exp, aud } = this.decodeRefreshToken(token);\n if (!iss || !sub || !exp || !aud) return false;\n if (iss !== 'steam') return false;", "score": 0.9026778936386108 }, { "filename": "src/modules/steam-tokens/steam-tokens.service.ts", "retrieved_chunk": " if (exp < Math.floor(Date.now() / 1000)) return false;\n if (!aud.includes('renew')) return false;\n return true;\n } catch (error) {\n return false;\n }\n }\n public getRefreshTokenExpiration(token: string) {\n try {\n const { exp } = this.decodeRefreshToken(token);", "score": 0.8819108009338379 }, { "filename": "src/modules/steam-tokens/steam-tokens.service.ts", "retrieved_chunk": " .then((result) => result.actionRequired && loginSession.emit('error', new Error('Guard action required')))\n .catch((error) => loginSession.emit('error', error));\n await pEvent(loginSession, 'authenticated', { rejectionEvents: ['error', 'timeout'], timeout: 35000 });\n const refreshToken = loginSession.refreshToken;\n if (!refreshToken) throw new Error('Refresh token is empty');\n return refreshToken;\n } catch (error) {\n if (error.eresult === EResult.RateLimitExceeded) this.throttleConnection(connectionId, 31 * 60 * 1000);\n throw new Error('Failed to create refresh token', { cause: error });\n } finally {", "score": 0.8482816219329834 }, { "filename": "src/modules/accounts-import/accounts-import.service.ts", "retrieved_chunk": " const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };\n try {\n const { SteamLogin: username, SteamPassword: password } = JSON.parse(fileContent);\n if (!username) throw new Error('Invalid username');\n if (!password) throw new Error('Invalid password');\n const account = new Account(`${username}:${password}`);\n result.values.push(account);\n } catch (error) {\n result.errors.push(fileContent);\n }", "score": 0.8229496479034424 }, { "filename": "src/modules/accounts-import/accounts-import.service.ts", "retrieved_chunk": " }\n return result;\n }\n throw new Error('Unsupported file format');\n } catch (error) {\n result.errors.push(filePath);\n }\n return result;\n }\n private readAccountFromString(str: string) {", "score": 0.81023108959198 } ]
typescript
} = this.steamTokensService.decodeRefreshToken(token);
import { beforeEach, describe } from '@jest/globals'; import { ParentNode } from './ParentNode'; import { ChildNode } from './ChildNode'; interface DummyParent extends ParentNode {} /** * */ @ParentNode class DummyParent { /** * * @param _options - dummy options */ // eslint-disable-next-line @typescript-eslint/no-empty-function,@typescript-eslint/no-unused-vars,no-unused-vars constructor(_options?: unknown) {} } interface DummyChild extends ChildNode {} /** * */ @ChildNode class DummyChild { /** * * @param _options - dummy options */ // eslint-disable-next-line @typescript-eslint/no-empty-function,@typescript-eslint/no-unused-vars,no-unused-vars constructor(_options?: unknown) {} } describe('ParentNode and ChildNode integration', () => { describe('child removal', () => { let parent: DummyParent; let child: DummyChild; beforeEach(() => { parent = new DummyParent(); child = new DummyChild({ parent }); }); it('should remove child from parent on child.remove() call', () => { child.remove();
expect(parent.children).not.toContain(child);
}); it('should set child\'s parent to null on parent.removeChild() call', () => { parent.removeChild(child); expect(child.parent).toBeNull(); }); }); describe('child addition', () => { let parent: DummyParent; let child: DummyChild; beforeEach(() => { parent = new DummyParent(); child = new DummyChild(); }); it('should add child to parent on child.appendTo call', () => { child.appendTo(parent); expect(parent.children).toContain(child); }); it('should set child\'s parent on parent.append() call', () => { parent.append(child); expect(child.parent).toEqual(parent); }); it('should set child\'s parent on parent.insertAfter() call', () => { const anotherChild = new DummyChild(); parent.append(child); parent.insertAfter(child, anotherChild); expect(anotherChild.parent).toEqual(parent); }); }); describe('child transfer from parent to parent', () => { let parent: DummyParent; let anotherParent: DummyParent; let child: DummyChild; beforeEach(() => { parent = new DummyParent(); child = new DummyChild({ parent }); anotherParent = new DummyParent(); }); it('should remove child from the old parent on new parent.append() call', () => { anotherParent.append(child); expect(parent.children).not.toContain(child); }); it('should remove child from the old parent on new parent.insertAfter() call', () => { const anotherChild = new DummyChild({ parent: anotherParent }); anotherParent.insertAfter(anotherChild, child); expect(parent.children).not.toContain(child); }); }); });
src/entities/interfaces/integration.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/interfaces/ChildNode.spec.ts", "retrieved_chunk": " dummy = new Dummy({\n parent: parentMock,\n });\n });\n it('should call parent\\'s removeChild method', () => {\n dummy.remove();\n expect(parentMock.removeChild).toBeCalledWith(dummy);\n });\n it('should set node\\'s parent to null', () => {\n dummy.remove();", "score": 0.9204248189926147 }, { "filename": "src/entities/interfaces/ParentNode.spec.ts", "retrieved_chunk": " it('should append passed children to new parent', () => {\n const childMock = createChildMock();\n dummy = new Dummy({\n children: [ childMock ],\n });\n expect(childMock.appendTo).toBeCalledWith(dummy);\n });\n });\n describe('.children', () => {\n it('should return empty array by default', () => {", "score": 0.9169782400131226 }, { "filename": "src/entities/interfaces/ChildNode.spec.ts", "retrieved_chunk": " dummy = new Dummy();\n expect(dummy.parent).toBeNull();\n });\n it('should return parent passed via constructor', () => {\n dummy = new Dummy({ parent: parentMock });\n expect(dummy.parent).toEqual(parentMock);\n });\n });\n describe('.remove()', () => {\n beforeEach(() => {", "score": 0.9114897847175598 }, { "filename": "src/entities/interfaces/ParentNode.spec.ts", "retrieved_chunk": " dummy = new Dummy({\n children: [ childMock ],\n });\n dummy.removeChild(childMock);\n expect(dummy.children).toHaveLength(0);\n });\n it('should call remove method of child', () => {\n const childMock = createChildMock();\n dummy = new Dummy({\n children: [ childMock ],", "score": 0.9087690114974976 }, { "filename": "src/entities/interfaces/ChildNode.spec.ts", "retrieved_chunk": " });\n it('should set node\\'s parent on appendTo call', () => {\n dummy.appendTo(parentMock);\n expect(dummy.parent).toBe(parentMock);\n });\n it('should do nothing if parents are the same', () => {\n const dummyWithParent = new Dummy({\n parent: parentMock,\n });\n jest.resetAllMocks();", "score": 0.899721622467041 } ]
typescript
expect(parent.children).not.toContain(child);
import { beforeEach, describe } from '@jest/globals'; import { ParentNode } from './ParentNode'; import { ChildNode } from './ChildNode'; interface DummyParent extends ParentNode {} /** * */ @ParentNode class DummyParent { /** * * @param _options - dummy options */ // eslint-disable-next-line @typescript-eslint/no-empty-function,@typescript-eslint/no-unused-vars,no-unused-vars constructor(_options?: unknown) {} } interface DummyChild extends ChildNode {} /** * */ @ChildNode class DummyChild { /** * * @param _options - dummy options */ // eslint-disable-next-line @typescript-eslint/no-empty-function,@typescript-eslint/no-unused-vars,no-unused-vars constructor(_options?: unknown) {} } describe('ParentNode and ChildNode integration', () => { describe('child removal', () => { let parent: DummyParent; let child: DummyChild; beforeEach(() => { parent = new DummyParent(); child = new DummyChild({ parent }); }); it('should remove child from parent on child.remove() call', () => { child.remove(); expect(parent.children).not.toContain(child); }); it('should set child\'s parent to null on parent.removeChild() call', () => { parent.removeChild(child); expect(child.parent).toBeNull(); }); }); describe('child addition', () => { let parent: DummyParent; let child: DummyChild; beforeEach(() => { parent = new DummyParent(); child = new DummyChild(); }); it('should add child to parent on child.appendTo call', () => {
child.appendTo(parent);
expect(parent.children).toContain(child); }); it('should set child\'s parent on parent.append() call', () => { parent.append(child); expect(child.parent).toEqual(parent); }); it('should set child\'s parent on parent.insertAfter() call', () => { const anotherChild = new DummyChild(); parent.append(child); parent.insertAfter(child, anotherChild); expect(anotherChild.parent).toEqual(parent); }); }); describe('child transfer from parent to parent', () => { let parent: DummyParent; let anotherParent: DummyParent; let child: DummyChild; beforeEach(() => { parent = new DummyParent(); child = new DummyChild({ parent }); anotherParent = new DummyParent(); }); it('should remove child from the old parent on new parent.append() call', () => { anotherParent.append(child); expect(parent.children).not.toContain(child); }); it('should remove child from the old parent on new parent.insertAfter() call', () => { const anotherChild = new DummyChild({ parent: anotherParent }); anotherParent.insertAfter(anotherChild, child); expect(parent.children).not.toContain(child); }); }); });
src/entities/interfaces/integration.spec.ts
editor-js-document-model-4cb9623
[ { "filename": "src/entities/interfaces/ParentNode.spec.ts", "retrieved_chunk": " it('should append passed children to new parent', () => {\n const childMock = createChildMock();\n dummy = new Dummy({\n children: [ childMock ],\n });\n expect(childMock.appendTo).toBeCalledWith(dummy);\n });\n });\n describe('.children', () => {\n it('should return empty array by default', () => {", "score": 0.9206522107124329 }, { "filename": "src/entities/interfaces/ChildNode.spec.ts", "retrieved_chunk": " dummy = new Dummy({\n parent: parentMock,\n });\n });\n it('should call parent\\'s removeChild method', () => {\n dummy.remove();\n expect(parentMock.removeChild).toBeCalledWith(dummy);\n });\n it('should set node\\'s parent to null', () => {\n dummy.remove();", "score": 0.9021696448326111 }, { "filename": "src/entities/interfaces/ChildNode.spec.ts", "retrieved_chunk": " });\n it('should set node\\'s parent on appendTo call', () => {\n dummy.appendTo(parentMock);\n expect(dummy.parent).toBe(parentMock);\n });\n it('should do nothing if parents are the same', () => {\n const dummyWithParent = new Dummy({\n parent: parentMock,\n });\n jest.resetAllMocks();", "score": 0.8977489471435547 }, { "filename": "src/entities/interfaces/ChildNode.spec.ts", "retrieved_chunk": " dummy = new Dummy();\n expect(dummy.parent).toBeNull();\n });\n it('should return parent passed via constructor', () => {\n dummy = new Dummy({ parent: parentMock });\n expect(dummy.parent).toEqual(parentMock);\n });\n });\n describe('.remove()', () => {\n beforeEach(() => {", "score": 0.8938477039337158 }, { "filename": "src/entities/interfaces/ChildNode.spec.ts", "retrieved_chunk": " expect(dummy.parent).toBeNull();\n });\n });\n describe('.appendTo()', () => {\n beforeEach(() => {\n dummy = new Dummy();\n });\n it('should call parent\\'s append method on appendTo call', () => {\n dummy.appendTo(parentMock);\n expect(parentMock.append).toBeCalledWith(dummy);", "score": 0.8815799951553345 } ]
typescript
child.appendTo(parent);
import crypto from "node:crypto"; import http from "node:http"; import https from "node:https"; import path from "node:path"; import { ServerResponse } from "node:http"; import { existsSync, readFileSync } from "node:fs"; import * as nanoid from "nanoid"; import {IncomingForm, Fields, Files} from "formidable"; import { MIME } from "./const"; export function sendJsonResponse(res: ServerResponse, error: object, status: number = 200) { res.writeHead(status, { "Content-type": "application/json", "Access-Control-Allow-Origin": '*', "Access-Control-Allow-Methods": 'GET, POST, PUT, DELETE', "Access-Control-Allow-Headers": 'Content-type, authorization', "Access-Control-Allow-Credentials": "true", }) res.write(JSON.stringify(error), "utf-8"); } export function sendEpubResponse(res: ServerResponse, epubBuffer: Buffer, code?: number) { res.writeHead(code ?? 200, { "Content-type": "application/epub+zip" }); res.write(epubBuffer); } export function uuid(): string { const nid = nanoid.customAlphabet("1234567890abcdef", 10); let id = nid(); return id; } export async function getBufferFromRawURL(resourceUrl: string): Promise<Buffer | null> { let url = new URL(resourceUrl); try { let buffArr: Buffer[] = await new Promise((resolve, reject) => { let func = url.protocol === "https:" ? https : http; func.get(url, (res) => { let data: Buffer[] = []; res.on("data", (d: Buffer) => data.push(d)) res.on("error", reject) res.on("end", () => resolve(data)) }) }) let buffer = Buffer.concat(buffArr); return buffer; } catch (err) { console.error(err); return null; } } export function sendHtmlResponse(res: ServerResponse, html: string, status: number = 200) { res.writeHead(status, { "Content-type": "text/html", }) res.write(html, "utf-8"); } export function parsePostData(req: http.IncomingMessage): Promise<Array<object>> { let form = new IncomingForm({ multiples: false }); return new Promise((resolve, reject) => { form.parse(req, (error, fields: Fields, files: Files) => { if (error) reject(error); resolve([fields, files]); }) }) } export function parseSimplePostData(req: http.IncomingMessage): Promise<Buffer> { return new Promise((resolve, reject) => { let data: Buffer[] = []; req.on("data", (chunk: Buffer) => data.push(chunk)) req.on("end", () => { const buf = Buffer.concat(data); resolve(buf); }); req.on("error", reject); }) } export function md5(data: string): string { return crypto .createHash("md5") .update(data) .digest("hex"); } export function sendPublicFile(res: ServerResponse, filepath: string) { let resourcePath = path.join(__dirname, "../../public", filepath) if (!existsSync(resourcePath)) { // we hope to handle the 404 state on the frontend resourcePath = path.join(__dirname, "../../public", "index.html") } let ext = resourcePath.split('.').pop(); res.
writeHead(200, { "Content-type": MIME[ext] });
res.write(readFileSync(resourcePath)) }
src/common/utils.ts
Aadv1k-quillia-52c5b34
[ { "filename": "src/models/Bucket.ts", "retrieved_chunk": " let p = path.join(this.bucketPath, name);\n console.log(p);\n fs.writeFileSync(p, buffer);\n return p;\n }\n let response = new Promise((resolve, reject) => {\n const writeStream = this.bucket.uploader.upload_stream({\n public_id: name,\n resource_type: \"raw\",\n format: name.split('.').pop() // ideally \"unsafe\" files should not reach this point", "score": 0.7765607833862305 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " let targetBook = await BOOK_DB.getBook(requestedBook);\n if (!targetBook) {\n sendJsonResponse(res, ERROR.resourceNotExists, 404);\n return;\n }\n let epubResourcePath = targetBook.path;\n const response: Array<Buffer> = await new Promise((resolve, reject) => {\n https.get(epubResourcePath, (res) => {\n let data: Array<Buffer> = [];\n res.on(\"data\", (d: Buffer) => data.push(d));", "score": 0.7592143416404724 }, { "filename": "src/server.ts", "retrieved_chunk": " } else if (url ===\"/api/signup\") {\n await RouteSignup(req, res);\n } else if (url ===\"/api/login\") {\n await RouteLogin(req, res);\n } else if (url.match(/^\\/api\\/books/)) {\n await RouteBooks(req, res);\n } else if (url.match(/^\\/api\\/issue/)) {\n await RouteIssue(req, res);\n } else {\n sendPublicFile(res, url);", "score": 0.7438968420028687 }, { "filename": "src/server.ts", "retrieved_chunk": "import http from \"node:http\";\nimport { sendPublicFile } from \"./common/utils\";\nimport RouteSignup from \"./routes/Signup\"\nimport RouteLogin from \"./routes/Login\";\nimport RouteBooks from \"./routes/Books\";\nimport RouteIssue from \"./routes/Issue\";\nexport default http.createServer( async (req: http.IncomingMessage, res: http.ServerResponse) => {\n const url: string = new URL(`https://foo.com${req.url}`).pathname;\n if (url === \"/\") {\n sendPublicFile(res, \"index.html\");", "score": 0.7399169206619263 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " sendJsonResponse(res, ERROR.resourceNotExists, 404);\n } else {\n sendJsonResponse(res, userIssues, 200);\n }\n }\n } else if (req.method === \"POST\") {\n if (req.headers?.[\"content-type\"] != \"application/json\") {\n sendJsonResponse(res, ERROR.invalidMimeForResource, 415);\n return;\n }", "score": 0.7357221841812134 } ]
typescript
writeHead(200, { "Content-type": MIME[ext] });
import pRetry from 'p-retry'; import { setTimeout as delay } from 'timers/promises'; import { Injectable, OnModuleInit } from '@nestjs/common'; import { ConfigService } from '@nestjs/config'; import { Account } from '../../interfaces/account.interface'; import { Session as ISession } from '../../interfaces/session.interface'; import { SteamTokensService } from '../../modules/steam-tokens/steam-tokens.service'; @Injectable() export class CreateSessionsService implements OnModuleInit { private schemaVersion: number; constructor(private readonly steamTokensService: SteamTokensService, private readonly configService: ConfigService) {} public onModuleInit() { this.schemaVersion = this.configService.getOrThrow<number>('session.schemaVersion'); } public async createSession(account: Account) { try { // we need to wait at least 30 seconds between each refresh token creation // because steam has a limit of logins for one account once per 30 seconds // probably it's fair only for accounts with 2FA enabled const delayMs = 1000 * 31; const desktopRefreshToken = await this.createRefreshToken(account, 'desktop'); await delay(delayMs); const webRefreshToken = await this.createRefreshToken(account, 'web'); await delay(delayMs); const mobileRefreshToken = await this.createRefreshToken(account, 'mobile'); await delay(delayMs); const steamId = this.getSteamIdFromRefreshToken(webRefreshToken); const schemaVersion = this.schemaVersion; const session: ISession = { username: account.username, password: account.password, sharedSecret: account.sharedSecret || null, identitySecret: account.identitySecret || null, steamId, webRefreshToken, mobileRefreshToken, desktopRefreshToken, schemaVersion, }; return session; } catch (error) { throw new Error('Failed to create session', { cause: error }); } } private async createRefreshToken(account: Account, platform: 'web' | 'mobile' | 'desktop') { try { return await pRetry((
) => this.steamTokensService.createRefreshToken(account, platform), {
retries: 3, minTimeout: 31000, maxTimeout: 31000, }); } catch (error) { throw new Error('Failed to create refresh token', { cause: error }); } } private getSteamIdFromRefreshToken(token: string) { try { const { sub: steamId } = this.steamTokensService.decodeRefreshToken(token); if (!steamId) throw new Error('SteamId is missing from refresh token'); return steamId; } catch (error) { throw new Error('Failed to get steamId from refresh token', { cause: error }); } } }
src/commands/create/create-sessions.service.ts
Sadzurami-steam-sessions-creator-97b7294
[ { "filename": "src/modules/steam-tokens/steam-tokens.service.ts", "retrieved_chunk": " return JSON.parse(headerJson);\n } catch (error) {\n throw new Error('An error occurred while decoding refresh token', { cause: error });\n }\n }\n public validateRefreshToken(token: string) {\n try {\n const { iss, sub, exp, aud } = this.decodeRefreshToken(token);\n if (!iss || !sub || !exp || !aud) return false;\n if (iss !== 'steam') return false;", "score": 0.8722646832466125 }, { "filename": "src/modules/steam-tokens/steam-tokens.service.ts", "retrieved_chunk": " private readonly connectionThrottlingTimeout = 31 * 1000;\n constructor(\n @Inject(CACHE_MANAGER) private throttledConnections: Cache,\n private readonly proxiesService: ProxiesService,\n ) {}\n public async createRefreshToken(account: Account, platform: 'web' | 'mobile' | 'desktop') {\n const loginSessionPlatform = this.inferLoginSessionPlatform(platform);\n const proxy = await this.proxiesService.getProxy();\n const connectionId = this.inferConnectionId((proxy || '').toString());\n await this.waitConnectionLimitReset(connectionId);", "score": 0.8644876480102539 }, { "filename": "src/modules/steam-tokens/steam-tokens.service.ts", "retrieved_chunk": " if (exp < Math.floor(Date.now() / 1000)) return false;\n if (!aud.includes('renew')) return false;\n return true;\n } catch (error) {\n return false;\n }\n }\n public getRefreshTokenExpiration(token: string) {\n try {\n const { exp } = this.decodeRefreshToken(token);", "score": 0.8607155084609985 }, { "filename": "src/modules/steam-tokens/steam-tokens.service.ts", "retrieved_chunk": " .then((result) => result.actionRequired && loginSession.emit('error', new Error('Guard action required')))\n .catch((error) => loginSession.emit('error', error));\n await pEvent(loginSession, 'authenticated', { rejectionEvents: ['error', 'timeout'], timeout: 35000 });\n const refreshToken = loginSession.refreshToken;\n if (!refreshToken) throw new Error('Refresh token is empty');\n return refreshToken;\n } catch (error) {\n if (error.eresult === EResult.RateLimitExceeded) this.throttleConnection(connectionId, 31 * 60 * 1000);\n throw new Error('Failed to create refresh token', { cause: error });\n } finally {", "score": 0.8546318411827087 }, { "filename": "src/modules/export-sessions/export-sessions.service.ts", "retrieved_chunk": " if (!directory || typeof directory !== 'string') throw new Error('Invalid output path');\n if (!path.isAbsolute(directory)) throw new Error('Output path must be absolute');\n try {\n await fs.mkdir(directory, { recursive: true });\n } catch (error) {\n throw new Error('Failed to create output directory', { cause: error });\n }\n this.outputPath = directory;\n }\n public async exportSession(session: Session) {", "score": 0.8304024338722229 } ]
typescript
) => this.steamTokensService.createRefreshToken(account, platform), {
import { Client } from "pg";

import { DB as DBConfig } from "../common/const";
import { Book } from "../common/types";

export default class BookModel {
  private readonly client: Client;

  constructor() {
    this.client = new Client({
      host: DBConfig.HOST,
      user: DBConfig.USER,
      password: DBConfig.PASSWORD,
      database: DBConfig.DB_NAME,
      port: DBConfig.PORT,
      ssl: true
    })
  }

  async init(): Promise<void> {
    try {
      await this.client.connect();
      await this.client.query(`CREATE TABLE IF NOT EXISTS books (
        id VARCHAR(255) UNIQUE NOT NULL,
        userid VARCHAR(255) NOT NULL,
        title VARCHAR(255) NOT NULL,
        author VARCHAR(255) NOT NULL,
        signature VARCHAR(255) NOT NULL,
        path VARCHAR(255) NOT NULL,
        cover VARCHAR(255) NOT NULL
      )
    `);
    } catch (error) {
      throw error;
    }
  }

  async bookExists(bookid: string): Promise<boolean> {
    const result = await this.client.query("SELECT EXISTS (SELECT 1 FROM books WHERE id = $1)", [bookid])
    return result.rows[0].exists
  }
async getBooks(): Promise<Array<Book> | null> {
try {
      let response = await this.client.query("SELECT * FROM books");
      return response.rows;
    } catch (error) {
      console.error(error);
      return null;
    }
  }

  async pushBook(book: Book): Promise<Book | null> {
    try {
      await this.client.query(`
        INSERT INTO books (id, userid, author, title, path, cover, signature)
        VALUES ($1, $2, $3, $4, $5, $6, $7)`,
        [book.id, book.userid, book.author, book.title, book.path, book?.cover ?? "", book.signature]
      )
      return book;
    } catch (error) {
      console.error(error);
      return null;
    }
  }

  async deleteBook(bookid: string, userid?: string) {
    try {
      await this.client.query(
        `DELETE FROM books WHERE id = $1 ${userid ? "AND userid = $2" : ""}`,
        userid ? [bookid, userid] : [bookid]
      );
      return bookid;
    } catch (error) {
      console.error(error);
      return null;
    }
  }

  async getBook(bookid: string, sig?: string): Promise<Book | null> {
    try {
      const response = await this.client.query(`SELECT * FROM books WHERE ${bookid ? "id = $1" : "signature = $1"}`, [bookid || sig]);
      return response.rows[0];
    } catch (error) {
      console.error(error);
      return null;
    }
  }

  async updateBook() { /* TODO */ }

  async close(): Promise<void> {
    await this.client.end();
  }
}
src/models/BookModel.ts
Aadv1k-quillia-52c5b34
[ { "filename": "src/models/UserModel.ts", "retrieved_chunk": " const result = await this.client.query(\"SELECT EXISTS (SELECT 1 FROM users WHERE email = $1)\", [email])\n return result.rows[0].exists\n } \n async getUserByID(id: string): Promise<User | null> {\n try {\n const response = await this.client.query(\n `SELECT * FROM users \n WHERE id = $1`, \n [id]\n );", "score": 0.8974189758300781 }, { "filename": "src/models/UserModel.ts", "retrieved_chunk": " return response.rows[0]\n } catch (error) {\n return null;\n }\n }\n async getUser(email: string, id?: string): Promise<User | null> {\n try {\n const response = await this.client.query(\n `SELECT * FROM users \n WHERE email = $1", "score": 0.8809190988540649 }, { "filename": "src/models/IssueModel.ts", "retrieved_chunk": " }\n }\n async getIssue(\n lenderid: string,\n bookid?: string,\n borrowerid?: string,\n ): Promise<Issue | null> {\n try {\n let response = await this.client.query(\n `SELECT * FROM issues ", "score": 0.8479409217834473 }, { "filename": "src/models/UserModel.ts", "retrieved_chunk": " OR id = $2`, \n [email, id ?? \"\"]\n );\n return response.rows[0]\n } catch (error) {\n return null;\n }\n }\n async getUsers(): Promise <Array<object> | null> {\n try {", "score": 0.8453177213668823 }, { "filename": "src/models/IssueModel.ts", "retrieved_chunk": " async getIssues(borrowerid: string): Promise<Array<Issue> | null> {\n try {\n let response = await this.client.query(\n \"SELECT * FROM issues WHERE borrowerid = $1\",\n [borrowerid]\n );\n return response.rows;\n } catch (error) {\n console.error(error);\n return null;", "score": 0.8424921631813049 } ]
typescript
async getBooks(): Promise<Array<Book> | null> {
import { Client } from "pg"; import { DB as DBConfig } from "../common/const"; import { Issue } from "../common/types"; export default class IssueModel { private readonly client: Client; constructor() { this.client = new Client({ host: DBConfig.HOST, user: DBConfig.USER, password: DBConfig.PASSWORD, database: DBConfig.DB_NAME, port: DBConfig.PORT, ssl: true, }); } async init(): Promise<void> { try { await this.client.connect(); await this.client.query(`CREATE TABLE IF NOT EXISTS issues ( id VARCHAR(255) UNIQUE NOT NULL, lenderid VARCHAR(255) NOT NULL, borrowerid VARCHAR(255) NOT NULL, bookid VARCHAR(255) NOT NULL ) `); } catch (error) { throw error; } } async issueExists(issueid: string): Promise<boolean> { const result = await this.client.query("SELECT EXISTS (SELECT 1 FROM issues WHERE id = $1)", [issueid]) return result.rows[0].exists } async pushIssue
(data: Issue): Promise<Issue | null> {
try {
      await this.client.query(
        "INSERT INTO issues (id, lenderid, borrowerid, bookid) VALUES ($1, $2, $3, $4)",
        [data.id, data.lenderid, data.borrowerid, data.bookid]
      );
      return data;
    } catch (error) {
      console.error(error);
      return null;
    }
  }

  async removeIssue(
    issueid: string,
    borrowerid?: string,
    lenderid?: string
  ): Promise<void | null> {
    try {
      await this.client.query(
        "DELETE FROM issues WHERE id = $1 OR borrowerid = $2 OR lenderid = $3",
        [issueid ?? "", borrowerid ?? "", lenderid ?? ""]
      );
    } catch (error) {
      console.error(error);
      return null;
    }
  }

  async getIssues(borrowerid: string): Promise<Array<Issue> | null> {
    try {
      let response = await this.client.query(
        "SELECT * FROM issues WHERE borrowerid = $1",
        [borrowerid]
      );
      return response.rows;
    } catch (error) {
      console.error(error);
      return null;
    }
  }

  async getIssue(
    lenderid: string,
    bookid?: string,
    borrowerid?: string,
  ): Promise<Issue | null> {
    try {
      let response = await this.client.query(
        `SELECT * FROM issues 
          WHERE borrowerid = $1
          AND bookid = $2
        `,
        [borrowerid ?? null, bookid ?? null]
      );
      return response.rows[0];
    } catch (error) {
      console.error(error);
      return null;
    }
  }

  async close(): Promise<void> {
    await this.client.end();
  }
}
src/models/IssueModel.ts
Aadv1k-quillia-52c5b34
[ { "filename": "src/models/BookModel.ts", "retrieved_chunk": " }\n }\n async bookExists(bookid: string): Promise<boolean> {\n const result = await this.client.query(\"SELECT EXISTS (SELECT 1 FROM books WHERE id = $1)\", [bookid])\n return result.rows[0].exists\n } \n async getBooks(): Promise<Array<Book> | null> {\n try {\n let response = await this.client.query(\"SELECT * FROM books\");\n return response.rows;", "score": 0.8849772214889526 }, { "filename": "src/models/UserModel.ts", "retrieved_chunk": " const result = await this.client.query(\"SELECT EXISTS (SELECT 1 FROM users WHERE email = $1)\", [email])\n return result.rows[0].exists\n } \n async getUserByID(id: string): Promise<User | null> {\n try {\n const response = await this.client.query(\n `SELECT * FROM users \n WHERE id = $1`, \n [id]\n );", "score": 0.8835096955299377 }, { "filename": "src/models/UserModel.ts", "retrieved_chunk": " return response.rows[0]\n } catch (error) {\n return null;\n }\n }\n async getUser(email: string, id?: string): Promise<User | null> {\n try {\n const response = await this.client.query(\n `SELECT * FROM users \n WHERE email = $1", "score": 0.8659282326698303 }, { "filename": "src/models/UserModel.ts", "retrieved_chunk": " } catch (error) {\n throw error;\n }\n }\n async pushTokenForUser(token: string, userid: string): Promise<void | null> {\n try {\n await this.client.query(\"INSERT INTO tokens (userid, token) VALUES ($1, $2)\", [userid, token]);\n } catch (error) {\n console.error(error);\n return null;", "score": 0.8497030138969421 }, { "filename": "src/models/UserModel.ts", "retrieved_chunk": " }\n }\n async deleteTokenForUser(token?: string, userid?: string): Promise<void | null> {\n try {\n await this.client.query(\"DELETE FROM tokens WHERE token = $1 OR userid = $2\", [token, userid]);\n } catch (error) {\n console.error(error);\n return null;\n }\n }", "score": 0.8356866240501404 } ]
typescript
(data: Issue): Promise<Issue | null> {
import BookModel from "../models/BookModel"; import Bucket from "../models/Bucket"; import Token from "../lib/GenerateToken"; import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const"; import { TokStatus, Book } from "../common/types"; import { sendJsonResponse, parseSimplePostData, md5, uuid, } from "../common/utils"; import filetype from "file-type-cjs"; import fs from "node:fs"; import EPub from "epub"; import os from "node:os"; import path from "node:path"; import crypto from "node:crypto"; import { exec } from "node:child_process"; import http from "node:http"; async function getEpubCoverFromEpubFile_UNIX( epubFilepath: string ): Promise<[Buffer, string] | null> { let randomString = crypto.randomBytes(16).toString("hex"); let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`); fs.mkdirSync(tempDir); let unzipCMD = `unzip -q ${epubFilepath} -d ${tempDir}`; let unzipCMDExec = new Promise((resolve, reject) => { exec(unzipCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); try { await unzipCMDExec; } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`; let findCMDExec: Promise<string> = new Promise((resolve, reject) => { exec(findCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); let selectedFilePath: string; try { selectedFilePath = await findCMDExec; selectedFilePath = selectedFilePath.trim(); } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let ret: [Buffer, string] = [ Buffer.from(fs.readFileSync(selectedFilePath)), selectedFilePath, ]; fs.rmSync(tempDir, { recursive: true }); // we r good boys! return ret; } export default async function ( req: http.IncomingMessage, res: http.ServerResponse ) { const BOOK_DB = new BookModel(); const BUCKET
= new Bucket();
await BOOK_DB.init(); await BUCKET.init(); try { if (req.method === "GET") { try { let userBooks = await BOOK_DB.getBooks(); userBooks = userBooks.map((e) => { delete e.path; return e; }); sendJsonResponse(res, userBooks, 200); } catch (error) { console.error(error); sendJsonResponse(res, ERROR.internalErr); } } else if (req.method === "POST") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let epubBuffer: Buffer; epubBuffer = await parseSimplePostData(req); let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6); let bufferMime = await filetype.fromBuffer(epubBuffer); if (bufferMime.mime != "application/epub+zip") { sendJsonResponse(res, ERROR.invalidMimeForResource, 415); return; } if (epubSizeInMB > MAX_EPUB_SIZE_MB) { sendJsonResponse(res, ERROR.fileTooLarge, 400); return; } let randomString = crypto.randomBytes(16).toString("hex"); const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`); fs.writeFileSync(tempEpubFilePath, epubBuffer); const epub: any = await new Promise((resolve, reject) => { const epub = new EPub(tempEpubFilePath); epub.on("end", () => resolve(epub)); epub.on("error", reject); epub.parse(); }); let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath); console.log(epubCoverBuffer); let epubSignature = md5(epubBuffer.toString("hex")); let foundBook = await BOOK_DB.getBook("", epubSignature); if (foundBook) { sendJsonResponse( res, { ...ERROR.resourceExists, data: { id: foundBook.id, }, }, 409 ); return; } let epubFilePermalink = await BUCKET.pushBufferWithName( epubBuffer, `${epubSignature}.epub` ); let epubCoverPermalink = null; if (epubCoverBuffer) { epubCoverPermalink = await BUCKET.pushBufferWithName( epubCoverBuffer[0], `${epubSignature}.${epubCoverBuffer[1].split(".").pop()}` ); } let epubID = uuid(); let epubEntry: Book = { id: epubID, userid: parsedAuthToken.id, title: epub.metadata?.title ?? epubID.split("-").pop(), author: epub.metadata?.creator ?? 
parsedAuthToken.email, path: epubFilePermalink, signature: epubSignature, cover: epubCoverPermalink, }; const pushed = await BOOK_DB.pushBook(epubEntry); if (!pushed) { sendJsonResponse(res, ERROR.internalErr, 500); return; } sendJsonResponse( res, { error: null, message: `successfully published a book of id ${epubEntry.id}`, data: { id: epubEntry.id, }, }, 201 ); } else if (req.method === "DELETE") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let body: Buffer; body = await parseSimplePostData(req); let data: any; try { data = JSON.parse(body.toString()); } catch { sendJsonResponse(res, ERROR.invalidJSONData, 400); return; } if (!data.bookid) { sendJsonResponse(res, ERROR.badRequest, 400); return; } let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id); if (!bookDeleted) { sendJsonResponse(res, { error: "unable-to-delete-book", message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`, status: 404 }, 404) return; } sendJsonResponse(res, { error: null, message: `successfully deleted book of id ${data.bookid}`, status: 204, data: { id: data.bookid, } }, 204) } } finally { await BOOK_DB.close(); } }
src/routes/Books.ts
Aadv1k-quillia-52c5b34
[ { "filename": "src/models/BookModel.ts", "retrieved_chunk": " database: DBConfig.DB_NAME,\n port: DBConfig.PORT,\n ssl: true\n })\n }\n async init(): Promise<void> {\n try {\n await this.client.connect();\n await this.client.query(`CREATE TABLE IF NOT EXISTS books (\n id VARCHAR(255) UNIQUE NOT NULL,", "score": 0.8393207788467407 }, { "filename": "src/models/UserModel.ts", "retrieved_chunk": " database: DBConfig.DB_NAME,\n port: DBConfig.PORT,\n ssl: true\n })\n }\n async init(): Promise<void> {\n try {\n await this.client.connect();\n await this.client.query(`\n CREATE TABLE IF NOT EXISTS users (", "score": 0.8337470293045044 }, { "filename": "src/models/Bucket.ts", "retrieved_chunk": " this.bucket = cloudinary;\n }\n }\n async init() {\n if (this.isLocal) {\n await new Promise((_, _a) => {\n fs.mkdir(this.bucketPath, (_b) => {});\n }) \n }\n // syntactical consistency", "score": 0.8228440284729004 }, { "filename": "src/models/IssueModel.ts", "retrieved_chunk": " database: DBConfig.DB_NAME,\n port: DBConfig.PORT,\n ssl: true,\n });\n }\n async init(): Promise<void> {\n try {\n await this.client.connect();\n await this.client.query(`CREATE TABLE IF NOT EXISTS issues (\n id VARCHAR(255) UNIQUE NOT NULL,", "score": 0.8206356763839722 }, { "filename": "src/common/utils.ts", "retrieved_chunk": " })\n })\n let buffer = Buffer.concat(buffArr);\n return buffer;\n } catch (err) {\n console.error(err);\n return null;\n }\n} \nexport function sendHtmlResponse(res: ServerResponse, html: string, status: number = 200) {", "score": 0.8179565668106079 } ]
typescript
= new Bucket();
import BookModel from "../models/BookModel"; import Bucket from "../models/Bucket"; import Token from "../lib/GenerateToken"; import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const"; import { TokStatus, Book } from "../common/types"; import { sendJsonResponse, parseSimplePostData, md5, uuid, } from "../common/utils"; import filetype from "file-type-cjs"; import fs from "node:fs"; import EPub from "epub"; import os from "node:os"; import path from "node:path"; import crypto from "node:crypto"; import { exec } from "node:child_process"; import http from "node:http"; async function getEpubCoverFromEpubFile_UNIX( epubFilepath: string ): Promise<[Buffer, string] | null> { let randomString = crypto.randomBytes(16).toString("hex"); let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`); fs.mkdirSync(tempDir); let unzipCMD = `unzip -q ${epubFilepath} -d ${tempDir}`; let unzipCMDExec = new Promise((resolve, reject) => { exec(unzipCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); try { await unzipCMDExec; } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`; let findCMDExec: Promise<string> = new Promise((resolve, reject) => { exec(findCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); let selectedFilePath: string; try { selectedFilePath = await findCMDExec; selectedFilePath = selectedFilePath.trim(); } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let ret: [Buffer, string] = [ Buffer.from(fs.readFileSync(selectedFilePath)), selectedFilePath, ]; fs.rmSync(tempDir, { recursive: true }); // we r good boys! return ret; } export default async function ( req: http.IncomingMessage, res: http.ServerResponse ) { const BOOK_DB = new BookModel(); const BUCKET = new Bucket(); await BOOK_DB.init(); await BUCKET.init(); try { if (req.method === "GET") { try { let userBooks = await BOOK_DB.getBooks(); userBooks = userBooks.map((e) => { delete e.path; return e; }); sendJsonResponse(res, userBooks, 200); } catch (error) { console.error(error); sendJsonResponse(
res, ERROR.internalErr);
} } else if (req.method === "POST") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let epubBuffer: Buffer; epubBuffer = await parseSimplePostData(req); let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6); let bufferMime = await filetype.fromBuffer(epubBuffer); if (bufferMime.mime != "application/epub+zip") { sendJsonResponse(res, ERROR.invalidMimeForResource, 415); return; } if (epubSizeInMB > MAX_EPUB_SIZE_MB) { sendJsonResponse(res, ERROR.fileTooLarge, 400); return; } let randomString = crypto.randomBytes(16).toString("hex"); const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`); fs.writeFileSync(tempEpubFilePath, epubBuffer); const epub: any = await new Promise((resolve, reject) => { const epub = new EPub(tempEpubFilePath); epub.on("end", () => resolve(epub)); epub.on("error", reject); epub.parse(); }); let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath); console.log(epubCoverBuffer); let epubSignature = md5(epubBuffer.toString("hex")); let foundBook = await BOOK_DB.getBook("", epubSignature); if (foundBook) { sendJsonResponse( res, { ...ERROR.resourceExists, data: { id: foundBook.id, }, }, 409 ); return; } let epubFilePermalink = await BUCKET.pushBufferWithName( epubBuffer, `${epubSignature}.epub` ); let epubCoverPermalink = null; if (epubCoverBuffer) { epubCoverPermalink = await BUCKET.pushBufferWithName( epubCoverBuffer[0], `${epubSignature}.${epubCoverBuffer[1].split(".").pop()}` ); } let epubID = uuid(); let epubEntry: Book = { id: epubID, userid: parsedAuthToken.id, title: epub.metadata?.title ?? epubID.split("-").pop(), author: epub.metadata?.creator ?? 
parsedAuthToken.email, path: epubFilePermalink, signature: epubSignature, cover: epubCoverPermalink, }; const pushed = await BOOK_DB.pushBook(epubEntry); if (!pushed) { sendJsonResponse(res, ERROR.internalErr, 500); return; } sendJsonResponse( res, { error: null, message: `successfully published a book of id ${epubEntry.id}`, data: { id: epubEntry.id, }, }, 201 ); } else if (req.method === "DELETE") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let body: Buffer; body = await parseSimplePostData(req); let data: any; try { data = JSON.parse(body.toString()); } catch { sendJsonResponse(res, ERROR.invalidJSONData, 400); return; } if (!data.bookid) { sendJsonResponse(res, ERROR.badRequest, 400); return; } let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id); if (!bookDeleted) { sendJsonResponse(res, { error: "unable-to-delete-book", message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`, status: 404 }, 404) return; } sendJsonResponse(res, { error: null, message: `successfully deleted book of id ${data.bookid}`, status: 204, data: { id: data.bookid, } }, 204) } } finally { await BOOK_DB.close(); } }
src/routes/Books.ts
Aadv1k-quillia-52c5b34
[ { "filename": "src/routes/Login.ts", "retrieved_chunk": " } catch(error) {\n sendJsonResponse(res, ERROR.invalidJSONData, 400)\n return;\n }\n await DB.init();\n const foundUser: User = await DB.getUser(parsedData.email);\n await DB.close();\n if (!foundUser) {\n sendJsonResponse(res, ERROR.userNotFound, 404);\n return;", "score": 0.8493356704711914 }, { "filename": "src/routes/Signup.ts", "retrieved_chunk": " sendJsonResponse(res, ERROR.badRequest, 400);\n return;\n }\n await DB.init();\n let foundUser = await DB.getUser(parsedData.email);\n if (foundUser) {\n sendJsonResponse(res, ERROR.userAlreadyExists, 409)\n return;\n }\n let user: User = {", "score": 0.830159068107605 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " res.on(\"end\", () => resolve(data));\n res.on(\"error\", (error) => reject(error));\n });\n });\n let epubBuffer = Buffer.concat(response);\n sendEpubResponse(res, epubBuffer);\n return;\n } else {\n let userIssues = await ISSUE_DB.getIssues(parsedAuthToken.id);\n if (!userIssues) {", "score": 0.828079342842102 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " let targetBook = await BOOK_DB.getBook(requestedBook);\n if (!targetBook) {\n sendJsonResponse(res, ERROR.resourceNotExists, 404);\n return;\n }\n let epubResourcePath = targetBook.path;\n const response: Array<Buffer> = await new Promise((resolve, reject) => {\n https.get(epubResourcePath, (res) => {\n let data: Array<Buffer> = [];\n res.on(\"data\", (d: Buffer) => data.push(d));", "score": 0.8256171941757202 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " let issueData: Issue;\n try {\n let issuePostData = await parseSimplePostData(req);\n issueData = JSON.parse(issuePostData.toString());\n } catch (error) {\n console.error(error);\n sendJsonResponse(res, ERROR.badRequest, 400);\n return;\n }\n if (!issueData.lenderid || !issueData.bookid) {", "score": 0.8173756003379822 } ]
typescript
res, ERROR.internalErr);
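The Books handler in the row above reads the raw EPUB bytes straight from the request body and authorizes with a Bearer token. A minimal client sketch follows; the base URL and route path are assumptions (they do not appear in the handler), while the headers, body and status codes come from the source.

import fs from "node:fs";

const BOOKS_URL = "http://localhost:8080/api/books"; // assumed route, not shown in the handler
const authToken = process.env.QUILLIA_TOKEN ?? "";   // a JWT previously issued by the login/signup routes

async function uploadEpub(filePath: string): Promise<void> {
  const epubBytes = fs.readFileSync(filePath); // raw bytes, as parseSimplePostData expects
  const res = await fetch(BOOKS_URL, {
    method: "POST",
    headers: { Authorization: `Bearer ${authToken}` },
    body: epubBytes,
  });
  // 201 -> { data: { id } }, 409 -> same signature already uploaded,
  // 415 -> not application/epub+zip, 400 -> larger than MAX_EPUB_SIZE_MB
  console.log(res.status, await res.json());
}

uploadEpub("./example.epub").catch(console.error);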
import { setTimeout as delay } from 'timers/promises'; import { Injectable, Logger } from '@nestjs/common'; import { ConfigService } from '@nestjs/config'; import { Session } from '../../interfaces/session.interface'; import { SteamTokensService } from '../../modules/steam-tokens/steam-tokens.service'; @Injectable() export class ValidateSessionsService { private readonly logger = new Logger(ValidateSessionsService.name); constructor(private readonly steamTokensService: SteamTokensService, private readonly configService: ConfigService) {} public async validateSessions(sessions: Session[]) { const valid: Session[] = []; const invalid: Session[] = []; for (const session of sessions) { const { valid: isValid, errors, expires } = await this.validateSession(session); if (isValid) { valid.push(session); this.logger.log( `Valid: ${session.username}, days: ${Math.floor((expires - Date.now()) / (24 * 60 * 60 * 1000))}`, ); } else { invalid.push(session); this.logger.warn(`Invalid: ${session.username}, errors: ${errors.join(', ')}`); } } if (invalid.length > 0) { this.logger.warn(`Invalid sessions:\n${invalid.map((session) => session.username).join('\n')}`); } await delay(1000); } private async validateSession(session: Session) { const errors: string[] = []; let expires = Date.now(); if (!session) errors.push('Invalid session'); if (session.schemaVersion !== this.configService.getOrThrow<number>('session.schemaVersion')) { errors.push('Outdated schema version'); } if (!session.username) errors.push('Invalid username');
if (!session.password) errors.push('Invalid password');
if (!session.steamId) errors.push('Invalid steamId'); if (!session.hasOwnProperty('sharedSecret')) errors.push('Invalid shared Secret'); if (!session.hasOwnProperty('identitySecret')) errors.push('Invalid identity Secret'); if (session.desktopRefreshToken) { if (!this.steamTokensService.validateRefreshToken(session.desktopRefreshToken)) { errors.push('Invalid desktop refresh token'); } const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.desktopRefreshToken); if (tokenExpiration > expires) expires = tokenExpiration; } if (session.mobileRefreshToken) { if (!this.steamTokensService.validateRefreshToken(session.mobileRefreshToken)) { errors.push('Invalid mobile refresh token'); } const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.mobileRefreshToken); if (tokenExpiration > expires) expires = tokenExpiration; } if (session.webRefreshToken) { if (!this.steamTokensService.validateRefreshToken(session.webRefreshToken)) { errors.push('Invalid web refresh token'); } const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.webRefreshToken); if (tokenExpiration > expires) expires = tokenExpiration; } if (expires < Date.now()) errors.push('Expired session'); return { valid: errors.length === 0, errors, expires }; } }
src/commands/validate/validate-sessions.service.ts
Sadzurami-steam-sessions-creator-97b7294
[ { "filename": "src/modules/accounts-import/accounts-import.service.ts", "retrieved_chunk": " const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };\n try {\n const { SteamLogin: username, SteamPassword: password } = JSON.parse(fileContent);\n if (!username) throw new Error('Invalid username');\n if (!password) throw new Error('Invalid password');\n const account = new Account(`${username}:${password}`);\n result.values.push(account);\n } catch (error) {\n result.errors.push(fileContent);\n }", "score": 0.8131706714630127 }, { "filename": "src/modules/accounts-import/accounts-import.service.ts", "retrieved_chunk": " return result;\n }\n private readAccountFromSessionFile(fileContent: string) {\n const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };\n try {\n const { Username, Password, SharedSecret, IdentitySecret } = JSON.parse(fileContent);\n if (!Username) throw new Error('Invalid username');\n if (!Password) throw new Error('Invalid password');\n const account = new Account(`${Username}:${Password}:${SharedSecret || ''}:${IdentitySecret || ''}`);\n result.values.push(account);", "score": 0.7984459400177002 }, { "filename": "src/modules/accounts-import/accounts-import.service.ts", "retrieved_chunk": " public sharedSecret: string | null = null;\n public identitySecret: string | null = null;\n constructor(account: string) {\n account = account.trim();\n if (account.length === 0) throw new Error('Invalid account');\n const parts = account.split(':').map((part) => part.trim());\n if (parts.length < 2) throw new Error('Invalid account');\n const [username, password, sharedSecret, identitySecret] = parts;\n this.username = username;\n this.password = password;", "score": 0.7961271405220032 }, { "filename": "src/commands/validate/validate-sessions.command.ts", "retrieved_chunk": " public async run(args: string[], options: ValidateCommandOptions) {\n try {\n const sessionsOptionInput = await this.normalizeInput(options.sessions);\n const sessions = await this.sessionsImportService.loadSessions(sessionsOptionInput);\n if (sessions.length === 0) throw new Error('No sessions found');\n this.logger.log(`Sessions: ${sessions.length}`);\n await this.validateSessionsService.validateSessions(sessions);\n } catch (error) {\n this.logger.error(error.message);\n }", "score": 0.7955697774887085 }, { "filename": "src/modules/export-sessions/export-sessions.service.ts", "retrieved_chunk": " const serializedSession = this.serializeSession(session);\n const sessionPath = path.resolve(this.outputPath, `${session.username}.${this.fileExtension}`);\n try {\n await fs.writeFile(sessionPath, serializedSession);\n } catch (error) {\n throw new Error('Failed to write session to file', { cause: error });\n }\n }\n private serializeSession(session: Session) {\n const serializedObject = Object.fromEntries(", "score": 0.773784339427948 } ]
typescript
if (!session.password) errors.push('Invalid password');
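For reference, the checks in validateSession above imply roughly this shape for a passing Session object. The interface itself lives in session.interface.ts and is not shown here, so the field list and the schemaVersion value below are inferred, not authoritative.

const exampleSession = {
  username: "example_account",
  password: "example_password",
  steamId: "76561198000000000",
  sharedSecret: null,            // may be null, but the key itself must exist (hasOwnProperty check)
  identitySecret: null,          // same as above
  schemaVersion: 2,              // must equal the 'session.schemaVersion' config value (2 is a guess)
  desktopRefreshToken: "eyJ...", // optional; each token present is validated and extends `expires`
  mobileRefreshToken: "eyJ...",
  webRefreshToken: "eyJ...",
};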
import { TokStatus } from "../common/types"; import { JWT } from "../common/const" import { createHmac } from "node:crypto"; export default class Token { generate(user: object, expiresIn?: number): string { const head = { algorithm: JWT.ALGO, typ: "JWT"}; const createdAt = Math.floor(Date.now() / 1000); const body = { ...user, iat: createdAt, exp: null} if (expiresIn) { body.exp = createdAt + expiresIn; } let b64Head = Buffer.from(JSON.stringify(head)).toString("base64").replace(/=/g, ""); let b64Body = Buffer.from(JSON.stringify(body)).toString("base64").replace(/=/g, ""); let signature = this.sign(`${b64Head}.${b64Body}`); return `${b64Head}.${b64Body}.${signature}` } verify(token: string): TokStatus { let [head, body, signature] = token.split('.'); if (!head || !body || !signature) { return TokStatus.INVALID; } if (this.sign(`${head}.${body}`) !== signature) { return TokStatus.INVALID_SIG } let decodedBody = Buffer.from(body, "base64").toString("utf-8"); const curTime = Math.floor(Date.now() / 1000); if (JSON.parse(decodedBody)?.exp > curTime) { return TokStatus.EXPIRED; } return TokStatus.VALID } // assumes that the token is valid UNSAFE_parse(token: string): object { const [ _a, body, _b ] = token.split("."); const parsedBody = Buffer.from(body, "base64").toString("utf-8"); const parsedJson = JSON.parse(parsedBody); return parsedJson; } private sign(data: string): string { return createHmac(JWT.
HASH, JWT.SECRET) .update(data) .digest("base64") .replace(/=/g, '') }
}
src/lib/GenerateToken.ts
Aadv1k-quillia-52c5b34
[ { "filename": "src/common/utils.ts", "retrieved_chunk": " resolve(buf);\n });\n req.on(\"error\", reject);\n })\n}\nexport function md5(data: string): string {\n return crypto\n .createHash(\"md5\")\n .update(data)\n .digest(\"hex\");", "score": 0.808538556098938 }, { "filename": "src/routes/Books.ts", "retrieved_chunk": " }\n const parsedAuthToken: any = token.UNSAFE_parse(authToken);\n let body: Buffer;\n body = await parseSimplePostData(req);\n let data: any;\n try {\n data = JSON.parse(body.toString());\n } catch {\n sendJsonResponse(res, ERROR.invalidJSONData, 400);\n return;", "score": 0.7343435883522034 }, { "filename": "src/routes/Login.ts", "retrieved_chunk": " const DB = new UserModel();\n let data: any = await parseSimplePostData(req);\n data = data.toString();\n if (req.method !== \"POST\") {\n sendJsonResponse(res, ERROR.methodNotAllowed, 405);\n return;\n }\n let parsedData: User;\n try {\n parsedData = JSON.parse(data);", "score": 0.6825299263000488 }, { "filename": "src/routes/Books.ts", "retrieved_chunk": " const token = new Token();\n const tokenStatus: TokStatus = token.verify(authToken);\n if (\n tokenStatus === TokStatus.INVALID ||\n tokenStatus === TokStatus.INVALID_SIG\n ) {\n sendJsonResponse(res, ERROR.unauthorized, 401);\n return;\n }\n const parsedAuthToken: any = token.UNSAFE_parse(authToken);", "score": 0.6815183162689209 }, { "filename": "src/routes/Books.ts", "retrieved_chunk": " console.error(error);\n sendJsonResponse(res, ERROR.internalErr);\n }\n } else if (req.method === \"POST\") {\n const authorization = req.headers?.authorization;\n const authToken = authorization?.split(\" \")?.pop();\n if (!authorization || !authToken) {\n sendJsonResponse(res, ERROR.unauthorized, 401);\n return;\n }", "score": 0.679455578327179 } ]
typescript
HASH, JWT.SECRET) .update(data) .digest("base64") .replace(/=/g, '') }
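The GenerateToken row above exposes generate, verify and UNSAFE_parse. A short usage sketch, assuming the imports resolve from the repository root; the payload fields are only illustrative.

import Token from "./src/lib/GenerateToken";
import { TokStatus } from "./src/common/types";

const token = new Token();

// generate() signs { ...user, iat, exp? } with an HMAC; expiresIn is in seconds.
const jwt = token.generate({ id: "user-1", email: "reader@example.com" }, 60 * 60);

// verify() checks the token structure, the HMAC signature and the exp claim.
if (token.verify(jwt) === TokStatus.VALID) {
  // UNSAFE_parse() decodes without verifying, so only call it after verify() passed.
  const payload = token.UNSAFE_parse(jwt) as { id: string; email: string };
  console.log(payload.id);
}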
import { setTimeout as delay } from 'timers/promises'; import { Injectable, Logger } from '@nestjs/common'; import { ConfigService } from '@nestjs/config'; import { Session } from '../../interfaces/session.interface'; import { SteamTokensService } from '../../modules/steam-tokens/steam-tokens.service'; @Injectable() export class ValidateSessionsService { private readonly logger = new Logger(ValidateSessionsService.name); constructor(private readonly steamTokensService: SteamTokensService, private readonly configService: ConfigService) {} public async validateSessions(sessions: Session[]) { const valid: Session[] = []; const invalid: Session[] = []; for (const session of sessions) { const { valid: isValid, errors, expires } = await this.validateSession(session); if (isValid) { valid.push(session); this.logger.log( `Valid: ${session.username}, days: ${Math.floor((expires - Date.now()) / (24 * 60 * 60 * 1000))}`, ); } else { invalid.push(session); this.logger.warn(`Invalid: ${session.username}, errors: ${errors.join(', ')}`); } } if (invalid.length > 0) { this.logger.warn(`Invalid sessions:\n${invalid.map((session) => session.username).join('\n')}`); } await delay(1000); } private async validateSession(session: Session) { const errors: string[] = []; let expires = Date.now(); if (!session) errors.push('Invalid session'); if (session.schemaVersion !== this.configService.getOrThrow<number>('session.schemaVersion')) { errors.push('Outdated schema version'); } if (!session.username) errors.push('Invalid username'); if (!session.password) errors.push('Invalid password'); if (!session.steamId) errors.push('Invalid steamId'); if (!session.hasOwnProperty('sharedSecret')) errors.push('Invalid shared Secret'); if (!session.hasOwnProperty('identitySecret')) errors.push('Invalid identity Secret');
if (session.desktopRefreshToken) {
if (!this.steamTokensService.validateRefreshToken(session.desktopRefreshToken)) { errors.push('Invalid desktop refresh token'); } const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.desktopRefreshToken); if (tokenExpiration > expires) expires = tokenExpiration; } if (session.mobileRefreshToken) { if (!this.steamTokensService.validateRefreshToken(session.mobileRefreshToken)) { errors.push('Invalid mobile refresh token'); } const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.mobileRefreshToken); if (tokenExpiration > expires) expires = tokenExpiration; } if (session.webRefreshToken) { if (!this.steamTokensService.validateRefreshToken(session.webRefreshToken)) { errors.push('Invalid web refresh token'); } const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.webRefreshToken); if (tokenExpiration > expires) expires = tokenExpiration; } if (expires < Date.now()) errors.push('Expired session'); return { valid: errors.length === 0, errors, expires }; } }
src/commands/validate/validate-sessions.service.ts
Sadzurami-steam-sessions-creator-97b7294
[ { "filename": "src/modules/accounts-import/accounts-import.service.ts", "retrieved_chunk": " const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };\n try {\n const { SteamLogin: username, SteamPassword: password } = JSON.parse(fileContent);\n if (!username) throw new Error('Invalid username');\n if (!password) throw new Error('Invalid password');\n const account = new Account(`${username}:${password}`);\n result.values.push(account);\n } catch (error) {\n result.errors.push(fileContent);\n }", "score": 0.7606154680252075 }, { "filename": "src/modules/accounts-import/accounts-import.service.ts", "retrieved_chunk": " return result;\n }\n private readAccountFromSessionFile(fileContent: string) {\n const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };\n try {\n const { Username, Password, SharedSecret, IdentitySecret } = JSON.parse(fileContent);\n if (!Username) throw new Error('Invalid username');\n if (!Password) throw new Error('Invalid password');\n const account = new Account(`${Username}:${Password}:${SharedSecret || ''}:${IdentitySecret || ''}`);\n result.values.push(account);", "score": 0.7423003911972046 }, { "filename": "src/modules/accounts-import/accounts-import.service.ts", "retrieved_chunk": " const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };\n try {\n let content = await this.readFilesQueue.add(() => fs.readFile(filePath, 'utf-8'));\n content = content.trim();\n if (content.length === 0) throw new Error('Empty file');\n // session file\n if (filePath.endsWith('.steamsession')) {\n const readResults = this.readAccountFromSessionFile(content);\n result.values.push(...readResults.values);\n if (readResults.errors.length > 0) result.errors.push(filePath);", "score": 0.7281314134597778 }, { "filename": "src/modules/steam-tokens/steam-tokens.service.ts", "retrieved_chunk": " this.throttleConnection(connectionId, this.connectionThrottlingTimeout);\n const loginSessionOptions = {};\n if (proxy) loginSessionOptions[proxy.protocol.includes('socks') ? 'socksProxy' : 'httpProxy'] = proxy.toString();\n const loginSession = new LoginSession(loginSessionPlatform, loginSessionOptions);\n loginSession.on('error', () => {}); // fallback errors handling\n try {\n const credentials = { accountName: account.username, password: account.password } as any;\n if (account.sharedSecret) credentials.steamGuardCode = SteamTotp.getAuthCode(account.sharedSecret);\n loginSession\n .startWithCredentials(credentials)", "score": 0.719677209854126 }, { "filename": "src/modules/accounts-import/accounts-import.service.ts", "retrieved_chunk": " public sharedSecret: string | null = null;\n public identitySecret: string | null = null;\n constructor(account: string) {\n account = account.trim();\n if (account.length === 0) throw new Error('Invalid account');\n const parts = account.split(':').map((part) => part.trim());\n if (parts.length < 2) throw new Error('Invalid account');\n const [username, password, sharedSecret, identitySecret] = parts;\n this.username = username;\n this.password = password;", "score": 0.7140531539916992 } ]
typescript
if (session.desktopRefreshToken) {
import BookModel from "../models/BookModel"; import Bucket from "../models/Bucket"; import Token from "../lib/GenerateToken"; import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const"; import { TokStatus, Book } from "../common/types"; import { sendJsonResponse, parseSimplePostData, md5, uuid, } from "../common/utils"; import filetype from "file-type-cjs"; import fs from "node:fs"; import EPub from "epub"; import os from "node:os"; import path from "node:path"; import crypto from "node:crypto"; import { exec } from "node:child_process"; import http from "node:http"; async function getEpubCoverFromEpubFile_UNIX( epubFilepath: string ): Promise<[Buffer, string] | null> { let randomString = crypto.randomBytes(16).toString("hex"); let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`); fs.mkdirSync(tempDir); let unzipCMD = `unzip -q ${epubFilepath} -d ${tempDir}`; let unzipCMDExec = new Promise((resolve, reject) => { exec(unzipCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); try { await unzipCMDExec; } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`; let findCMDExec: Promise<string> = new Promise((resolve, reject) => { exec(findCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); let selectedFilePath: string; try { selectedFilePath = await findCMDExec; selectedFilePath = selectedFilePath.trim(); } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let ret: [Buffer, string] = [ Buffer.from(fs.readFileSync(selectedFilePath)), selectedFilePath, ]; fs.rmSync(tempDir, { recursive: true }); // we r good boys! return ret; } export default async function ( req: http.IncomingMessage, res: http.ServerResponse ) { const BOOK_DB = new BookModel(); const BUCKET = new Bucket(); await BOOK_DB.init(); await BUCKET.init(); try { if (req.method === "GET") { try { let userBooks = await BOOK_DB.getBooks(); userBooks = userBooks.map((e) => { delete e.path; return e; }); sendJsonResponse(res, userBooks, 200); } catch (error) { console.error(error); sendJsonResponse(res, ERROR.internalErr); } } else if (req.method === "POST") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; }
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let epubBuffer: Buffer; epubBuffer = await parseSimplePostData(req); let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6); let bufferMime = await filetype.fromBuffer(epubBuffer); if (bufferMime.mime != "application/epub+zip") { sendJsonResponse(res, ERROR.invalidMimeForResource, 415); return; } if (epubSizeInMB > MAX_EPUB_SIZE_MB) { sendJsonResponse(res, ERROR.fileTooLarge, 400); return; } let randomString = crypto.randomBytes(16).toString("hex"); const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`); fs.writeFileSync(tempEpubFilePath, epubBuffer); const epub: any = await new Promise((resolve, reject) => { const epub = new EPub(tempEpubFilePath); epub.on("end", () => resolve(epub)); epub.on("error", reject); epub.parse(); }); let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath); console.log(epubCoverBuffer); let epubSignature = md5(epubBuffer.toString("hex")); let foundBook = await BOOK_DB.getBook("", epubSignature); if (foundBook) { sendJsonResponse( res, { ...ERROR.resourceExists, data: { id: foundBook.id, }, }, 409 ); return; } let epubFilePermalink = await BUCKET.pushBufferWithName( epubBuffer, `${epubSignature}.epub` ); let epubCoverPermalink = null; if (epubCoverBuffer) { epubCoverPermalink = await BUCKET.pushBufferWithName( epubCoverBuffer[0], `${epubSignature}.${epubCoverBuffer[1].split(".").pop()}` ); } let epubID = uuid(); let epubEntry: Book = { id: epubID, userid: parsedAuthToken.id, title: epub.metadata?.title ?? epubID.split("-").pop(), author: epub.metadata?.creator ?? parsedAuthToken.email, path: epubFilePermalink, signature: epubSignature, cover: epubCoverPermalink, }; const pushed = await BOOK_DB.pushBook(epubEntry); if (!pushed) { sendJsonResponse(res, ERROR.internalErr, 500); return; } sendJsonResponse( res, { error: null, message: `successfully published a book of id ${epubEntry.id}`, data: { id: epubEntry.id, }, }, 201 ); } else if (req.method === "DELETE") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let body: Buffer; body = await parseSimplePostData(req); let data: any; try { data = JSON.parse(body.toString()); } catch { sendJsonResponse(res, ERROR.invalidJSONData, 400); return; } if (!data.bookid) { sendJsonResponse(res, ERROR.badRequest, 400); return; } let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id); if (!bookDeleted) { sendJsonResponse(res, { error: "unable-to-delete-book", message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`, status: 404 }, 404) return; } sendJsonResponse(res, { error: null, message: `successfully deleted book of id ${data.bookid}`, status: 204, data: { id: data.bookid, } }, 204) } } finally { await BOOK_DB.close(); } }
src/routes/Books.ts
Aadv1k-quillia-52c5b34
[ { "filename": "src/routes/Issue.ts", "retrieved_chunk": " const BOOK_DB = new BookModel();\n const USER_DB = new UserModel();\n const authorization = req.headers?.authorization;\n const authToken = authorization?.split(\" \")?.pop()?.trim();\n try {\n if (req.method === \"OPTIONS\") {\n sendJsonResponse(res, {}, 200);\n return;\n }\n if (!authorization || !authToken) {", "score": 0.9325792193412781 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " sendJsonResponse(res, ERROR.unauthorized, 401);\n return;\n }\n const token = new Token();\n const tokenStatus: TokStatus = token.verify(authToken);\n if (\n tokenStatus === TokStatus.INVALID ||\n tokenStatus === TokStatus.INVALID_SIG\n ) {\n sendJsonResponse(res, ERROR.unauthorized, 401);", "score": 0.8688471913337708 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " sendJsonResponse(res, ERROR.resourceNotExists, 404);\n } else {\n sendJsonResponse(res, userIssues, 200);\n }\n }\n } else if (req.method === \"POST\") {\n if (req.headers?.[\"content-type\"] != \"application/json\") {\n sendJsonResponse(res, ERROR.invalidMimeForResource, 415);\n return;\n }", "score": 0.8608791828155518 }, { "filename": "src/routes/Login.ts", "retrieved_chunk": " const DB = new UserModel();\n let data: any = await parseSimplePostData(req);\n data = data.toString();\n if (req.method !== \"POST\") {\n sendJsonResponse(res, ERROR.methodNotAllowed, 405);\n return;\n }\n let parsedData: User;\n try {\n parsedData = JSON.parse(data);", "score": 0.8451359272003174 }, { "filename": "src/routes/Signup.ts", "retrieved_chunk": ") {\n const DB = new UserModel();\n if (req.method !== \"POST\") {\n sendJsonResponse(res, ERROR.methodNotAllowed, 405);\n return;\n }\n let data: any = await parseSimplePostData(req);\n data = data.toString();\n let parsedData: User;\n try {", "score": 0.841723620891571 } ]
typescript
const token = new Token();
import http from "node:http"; import { sendJsonResponse, md5, uuid, parseSimplePostData } from "../common/utils"; import { ERROR } from "../common/const"; import { User } from "../common/types"; import UserModel from "../models/UserModel"; import Token from "../lib/GenerateToken"; import isEmailValid from "../lib/isEmailValid"; export default async function ( req: http.IncomingMessage, res: http.ServerResponse ) { const DB = new UserModel(); if (req.method !== "POST") { sendJsonResponse(res, ERROR.methodNotAllowed, 405); return; } let data: any = await parseSimplePostData(req); data = data.toString(); let parsedData: User; try { parsedData = JSON.parse(data === "" ? '{}' : data); } catch { sendJsonResponse(res, ERROR.invalidJSONData, 400) return; } if (!parsedData.email || !parsedData.password) { sendJsonResponse(res, ERROR.badRequest, 400); return; } if (!isEmailValid(parsedData.email)) { sendJsonResponse(res, ERROR.badRequest, 400); return; } await DB.init(); let foundUser = await DB.getUser(parsedData.email); if (foundUser) { sendJsonResponse(res, ERROR.userAlreadyExists, 409) return; } let user: User = { id: uuid(), email: parsedData.email, password:
md5(parsedData.password), } const token = new Token();
let pushed = await DB.pushUser(user) const { password, ...tokenBody} = user; let accessToken = token.generate(tokenBody); if (pushed !== null) { sendJsonResponse(res, { status: 201, message: "successfully created new user", error: null, token: accessToken, data: { email: user.email, id: user.id } }, 201) } else { sendJsonResponse(res, ERROR.internalErr, 500); } await DB.close(); }
src/routes/Signup.ts
Aadv1k-quillia-52c5b34
[ { "filename": "src/routes/Login.ts", "retrieved_chunk": " }\n if (md5(parsedData.password) !== foundUser.password) {\n sendJsonResponse(res, ERROR.unauthorized, 401);\n return;\n }\n const token = new Token();\n const { password, ...tokenBody} = foundUser;\n let accessToken = token.generate(tokenBody);\n sendJsonResponse(res, {\n messaged: \"found the given user\",", "score": 0.8842193484306335 }, { "filename": "src/routes/Login.ts", "retrieved_chunk": " status: 200,\n error: null,\n token: accessToken,\n data: {\n email: foundUser.email,\n id: foundUser.id,\n }\n }, 200)\n}", "score": 0.8348435163497925 }, { "filename": "src/routes/Login.ts", "retrieved_chunk": " } catch(error) {\n sendJsonResponse(res, ERROR.invalidJSONData, 400)\n return;\n }\n await DB.init();\n const foundUser: User = await DB.getUser(parsedData.email);\n await DB.close();\n if (!foundUser) {\n sendJsonResponse(res, ERROR.userNotFound, 404);\n return;", "score": 0.811930239200592 }, { "filename": "src/routes/Login.ts", "retrieved_chunk": " const DB = new UserModel();\n let data: any = await parseSimplePostData(req);\n data = data.toString();\n if (req.method !== \"POST\") {\n sendJsonResponse(res, ERROR.methodNotAllowed, 405);\n return;\n }\n let parsedData: User;\n try {\n parsedData = JSON.parse(data);", "score": 0.8050146102905273 }, { "filename": "src/routes/Books.ts", "retrieved_chunk": " id: epubEntry.id,\n },\n },\n 201\n );\n } else if (req.method === \"DELETE\") {\n const authorization = req.headers?.authorization;\n const authToken = authorization?.split(\" \")?.pop();\n if (!authorization || !authToken) {\n sendJsonResponse(res, ERROR.unauthorized, 401);", "score": 0.803197979927063 } ]
typescript
md5(parsedData.password), } const token = new Token();
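A hypothetical call against the Signup handler above; the request body shape, the status codes and the response fields come from the handler, while the URL is assumed.

(async () => {
  const res = await fetch("http://localhost:8080/api/signup", {
    method: "POST",
    body: JSON.stringify({ email: "reader@example.com", password: "hunter2" }),
  });

  // 201 -> { status, message, error, token, data: { email, id } }
  // 409 -> user already exists, 400 -> missing or invalid email/password
  const body = await res.json();
  console.log(res.status, body.token);
})();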
import BookModel from "../models/BookModel"; import Bucket from "../models/Bucket"; import Token from "../lib/GenerateToken"; import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const"; import { TokStatus, Book } from "../common/types"; import { sendJsonResponse, parseSimplePostData, md5, uuid, } from "../common/utils"; import filetype from "file-type-cjs"; import fs from "node:fs"; import EPub from "epub"; import os from "node:os"; import path from "node:path"; import crypto from "node:crypto"; import { exec } from "node:child_process"; import http from "node:http"; async function getEpubCoverFromEpubFile_UNIX( epubFilepath: string ): Promise<[Buffer, string] | null> { let randomString = crypto.randomBytes(16).toString("hex"); let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`); fs.mkdirSync(tempDir); let unzipCMD = `unzip -q ${epubFilepath} -d ${tempDir}`; let unzipCMDExec = new Promise((resolve, reject) => { exec(unzipCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); try { await unzipCMDExec; } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`; let findCMDExec: Promise<string> = new Promise((resolve, reject) => { exec(findCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); let selectedFilePath: string; try { selectedFilePath = await findCMDExec; selectedFilePath = selectedFilePath.trim(); } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let ret: [Buffer, string] = [ Buffer.from(fs.readFileSync(selectedFilePath)), selectedFilePath, ]; fs.rmSync(tempDir, { recursive: true }); // we r good boys! return ret; } export default async function ( req: http.IncomingMessage, res: http.ServerResponse ) { const BOOK_DB = new BookModel(); const BUCKET = new Bucket(); await BOOK_DB.init(); await BUCKET.init(); try { if (req.method === "GET") { try { let userBooks = await BOOK_DB.getBooks(); userBooks = userBooks.map((e) => { delete e.path; return e; }); sendJsonResponse(res, userBooks, 200); } catch (error) { console.error(error); sendJsonResponse(res, ERROR.internalErr); } } else if (req.method === "POST") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let epubBuffer: Buffer; epubBuffer = await parseSimplePostData(req); let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6); let bufferMime = await filetype.fromBuffer(epubBuffer); if (bufferMime.mime != "application/epub+zip") { sendJsonResponse(res, ERROR.invalidMimeForResource, 415); return; } if (epubSizeInMB > MAX_EPUB_SIZE_MB) { sendJsonResponse(res, ERROR.fileTooLarge, 400); return; } let randomString = crypto.randomBytes(16).toString("hex"); const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`); fs.writeFileSync(tempEpubFilePath, epubBuffer); const epub: any = await new Promise((resolve, reject) => { const epub = new EPub(tempEpubFilePath); epub.on("end", () => resolve(epub)); epub.on("error", reject); epub.parse(); }); let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath); console.log(epubCoverBuffer); let epubSignature = md5(epubBuffer.toString("hex")); let foundBook = await BOOK_DB.getBook("", epubSignature); if (foundBook) { sendJsonResponse( res, { ...ERROR.resourceExists, data: { id: foundBook.id, }, }, 409 ); return; } let epubFilePermalink = await BUCKET.pushBufferWithName( epubBuffer, `${epubSignature}.epub` ); let epubCoverPermalink = null; if (epubCoverBuffer) { epubCoverPermalink = await BUCKET.pushBufferWithName( epubCoverBuffer[0], `${epubSignature}.${epubCoverBuffer[1].split(".").pop()}` ); } let epubID = uuid(); let epubEntry: Book = { id: epubID, userid: parsedAuthToken.id, title: epub.metadata?.title ?? epubID.split("-").pop(), author: epub.metadata?.creator ?? parsedAuthToken.email, path: epubFilePermalink, signature: epubSignature, cover: epubCoverPermalink, }; const pushed = await BOOK_DB.pushBook(epubEntry); if (!pushed) { sendJsonResponse(res, ERROR.internalErr, 500); return; } sendJsonResponse( res, { error: null, message: `successfully published a book of id ${epubEntry.id}`, data: { id: epubEntry.id, }, }, 201 ); } else if (req.method === "DELETE") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let body: Buffer; body = await parseSimplePostData(req); let data: any; try { data = JSON.parse(body.toString()); } catch { sendJsonResponse(res, ERROR.invalidJSONData, 400); return; } if (!data.bookid) { sendJsonResponse(res, ERROR.badRequest, 400); return; } let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id); if (!bookDeleted) { sendJsonResponse(res, { error: "unable-to-delete-book", message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`, status: 404 }, 404) return; } sendJsonResponse(res, { error: null, message: `successfully deleted book of id ${data.bookid}`, status: 204, data: { id: data.bookid, } }, 204) } } finally { await BOOK_DB.close(); } }
src/routes/Books.ts
Aadv1k-quillia-52c5b34
[ { "filename": "src/routes/Issue.ts", "retrieved_chunk": " const BOOK_DB = new BookModel();\n const USER_DB = new UserModel();\n const authorization = req.headers?.authorization;\n const authToken = authorization?.split(\" \")?.pop()?.trim();\n try {\n if (req.method === \"OPTIONS\") {\n sendJsonResponse(res, {}, 200);\n return;\n }\n if (!authorization || !authToken) {", "score": 0.9253607988357544 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " sendJsonResponse(res, ERROR.unauthorized, 401);\n return;\n }\n const token = new Token();\n const tokenStatus: TokStatus = token.verify(authToken);\n if (\n tokenStatus === TokStatus.INVALID ||\n tokenStatus === TokStatus.INVALID_SIG\n ) {\n sendJsonResponse(res, ERROR.unauthorized, 401);", "score": 0.8880348801612854 }, { "filename": "src/routes/Login.ts", "retrieved_chunk": " const DB = new UserModel();\n let data: any = await parseSimplePostData(req);\n data = data.toString();\n if (req.method !== \"POST\") {\n sendJsonResponse(res, ERROR.methodNotAllowed, 405);\n return;\n }\n let parsedData: User;\n try {\n parsedData = JSON.parse(data);", "score": 0.8497201800346375 }, { "filename": "src/routes/Login.ts", "retrieved_chunk": " }\n if (md5(parsedData.password) !== foundUser.password) {\n sendJsonResponse(res, ERROR.unauthorized, 401);\n return;\n }\n const token = new Token();\n const { password, ...tokenBody} = foundUser;\n let accessToken = token.generate(tokenBody);\n sendJsonResponse(res, {\n messaged: \"found the given user\",", "score": 0.8406044244766235 }, { "filename": "src/routes/Signup.ts", "retrieved_chunk": ") {\n const DB = new UserModel();\n if (req.method !== \"POST\") {\n sendJsonResponse(res, ERROR.methodNotAllowed, 405);\n return;\n }\n let data: any = await parseSimplePostData(req);\n data = data.toString();\n let parsedData: User;\n try {", "score": 0.8399878740310669 } ]
typescript
const tokenStatus: TokStatus = token.verify(authToken);
import IssueModel from "../models/IssueModel"; import BookModel from "../models/BookModel"; import UserModel from "../models/UserModel"; import Token from "../lib/GenerateToken"; import { ERROR } from "../common/const"; import { TokStatus, Issue } from "../common/types"; import { sendJsonResponse, sendEpubResponse, parseSimplePostData, uuid, getBufferFromRawURL, } from "../common/utils"; import http from "node:http"; import https from "node:https"; export default async function ( req: http.IncomingMessage, res: http.ServerResponse ) { const ISSUE_DB = new IssueModel(); const BOOK_DB = new BookModel(); const USER_DB = new UserModel(); const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop()?.trim(); try { if (req.method === "OPTIONS") { sendJsonResponse(res, {}, 200); return; } if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } await ISSUE_DB.init(); await BOOK_DB.init(); await USER_DB.init(); const parsedAuthToken: any = token.UNSAFE_parse(authToken); if (req.method === "GET") { let URLParams = req.url.split("/").slice(3); let requestedBook = URLParams?.[0]; if (requestedBook) { let targetBook = await BOOK_DB.getBook(requestedBook); if (!targetBook) { sendJsonResponse(res, ERROR.resourceNotExists, 404); return; } let epubResourcePath = targetBook.path; const response: Array<Buffer> = await new Promise((resolve, reject) => { https.get(epubResourcePath, (res) => { let data: Array<Buffer> = []; res.on("data", (d: Buffer) => data.push(d)); res.on("end", () => resolve(data)); res.on("error", (error) => reject(error)); }); }); let epubBuffer = Buffer.concat(response); sendEpubResponse(res, epubBuffer); return; } else { let userIssues = await ISSUE_DB.getIssues(parsedAuthToken.id); if (!userIssues) { sendJsonResponse(res, ERROR.resourceNotExists, 404); } else { sendJsonResponse(res, userIssues, 200); } } } else if (req.method === "POST") { if (req.headers?.["content-type"] != "application/json") { sendJsonResponse(res, ERROR.invalidMimeForResource, 415); return; }
let issueData: Issue;
try { let issuePostData = await parseSimplePostData(req); issueData = JSON.parse(issuePostData.toString()); } catch (error) { console.error(error); sendJsonResponse(res, ERROR.badRequest, 400); return; } if (!issueData.lenderid || !issueData.bookid) { sendJsonResponse(res, ERROR.badRequest, 400); return; } let foundLender = await USER_DB.getUserByID(issueData.lenderid); let foundBook = await BOOK_DB.getBook(issueData.bookid); if (!foundLender || !foundBook) { sendJsonResponse(res, ERROR.resourceNotExists, 404); return; } let foundIssue = await ISSUE_DB.getIssue( foundLender.id, foundBook.id, parsedAuthToken.id ); if (foundIssue) { sendJsonResponse( res, { ...ERROR.resourceExists, data: { id: foundIssue.id, bookid: foundIssue.bookid, }, }, 409 ); return; } let issueid = uuid(); let issueEntry: Issue = { id: issueid, borrowerid: parsedAuthToken.id, lenderid: foundLender.id, bookid: foundBook.id, }; const pushed = await ISSUE_DB.pushIssue(issueEntry); if (!pushed) { sendJsonResponse(res, ERROR.internalErr, 500); return; } sendJsonResponse( res, { error: null, message: `successfully created a new issue of id ${issueEntry.id}`, data: { id: pushed.id, borrower: pushed.borrowerid, lender: pushed.lenderid, book: foundBook.title, }, }, 201 ); } } finally { await ISSUE_DB.close(); await BOOK_DB.close(); await USER_DB.close(); } }
src/routes/Issue.ts
Aadv1k-quillia-52c5b34
[ { "filename": "src/routes/Books.ts", "retrieved_chunk": " console.error(error);\n sendJsonResponse(res, ERROR.internalErr);\n }\n } else if (req.method === \"POST\") {\n const authorization = req.headers?.authorization;\n const authToken = authorization?.split(\" \")?.pop();\n if (!authorization || !authToken) {\n sendJsonResponse(res, ERROR.unauthorized, 401);\n return;\n }", "score": 0.8853636980056763 }, { "filename": "src/routes/Books.ts", "retrieved_chunk": " id: epubEntry.id,\n },\n },\n 201\n );\n } else if (req.method === \"DELETE\") {\n const authorization = req.headers?.authorization;\n const authToken = authorization?.split(\" \")?.pop();\n if (!authorization || !authToken) {\n sendJsonResponse(res, ERROR.unauthorized, 401);", "score": 0.8743545413017273 }, { "filename": "src/routes/Login.ts", "retrieved_chunk": " const DB = new UserModel();\n let data: any = await parseSimplePostData(req);\n data = data.toString();\n if (req.method !== \"POST\") {\n sendJsonResponse(res, ERROR.methodNotAllowed, 405);\n return;\n }\n let parsedData: User;\n try {\n parsedData = JSON.parse(data);", "score": 0.8595779538154602 }, { "filename": "src/routes/Signup.ts", "retrieved_chunk": ") {\n const DB = new UserModel();\n if (req.method !== \"POST\") {\n sendJsonResponse(res, ERROR.methodNotAllowed, 405);\n return;\n }\n let data: any = await parseSimplePostData(req);\n data = data.toString();\n let parsedData: User;\n try {", "score": 0.8544892072677612 }, { "filename": "src/routes/Signup.ts", "retrieved_chunk": " parsedData = JSON.parse(data === \"\" ? '{}' : data);\n } catch {\n sendJsonResponse(res, ERROR.invalidJSONData, 400)\n return;\n }\n if (!parsedData.email || !parsedData.password) {\n sendJsonResponse(res, ERROR.badRequest, 400);\n return;\n }\n if (!isEmailValid(parsedData.email)) {", "score": 0.8376417756080627 } ]
typescript
let issueData: Issue;
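A hypothetical request against the Issue handler above; the JSON body fields and the required headers are taken from the source, while the route path and the ids are placeholders.

(async () => {
  const res = await fetch("http://localhost:8080/api/issues", {
    method: "POST",
    headers: {
      "Content-Type": "application/json", // anything else is rejected with 415
      Authorization: `Bearer ${process.env.QUILLIA_TOKEN}`,
    },
    body: JSON.stringify({ lenderid: "<lender-uuid>", bookid: "<book-uuid>" }),
  });
  // 201 -> { data: { id, borrower, lender, book } }, 404 -> unknown lender or book,
  // 409 -> issue already exists, 400 -> missing lenderid or bookid
  console.log(res.status, await res.json());
})();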
import fs from 'fs/promises'; import inquirer from 'inquirer'; import pQueue from 'p-queue'; import { setTimeout as delay } from 'timers/promises'; import { Injectable, Logger } from '@nestjs/common'; import { Account as IAccount } from '../../interfaces/account.interface'; import { Secrets } from '../../interfaces/secrets.interface'; class Account implements IAccount { public readonly username: string; public readonly password: string; public sharedSecret: string | null = null; public identitySecret: string | null = null; constructor(account: string) { account = account.trim(); if (account.length === 0) throw new Error('Invalid account'); const parts = account.split(':').map((part) => part.trim()); if (parts.length < 2) throw new Error('Invalid account'); const [username, password, sharedSecret, identitySecret] = parts; this.username = username; this.password = password; if (sharedSecret) this.sharedSecret = sharedSecret; if (identitySecret) this.identitySecret = identitySecret; } } @Injectable() export class AccountsImportService { private readonly logger = new Logger(AccountsImportService.name); private readonly readFilesQueue = new pQueue({ concurrency: 100 }); public async loadAccounts(input: string[] | string) { if (!input) return []; if (!Array.isArray(input)) input = [input]; if (input.length === 0) return []; let accounts: Account[] = []; const errors: string[] = []; const readResults = await Promise.all(input.map((input) => this.readAccountsFromInput(input))); for (const result of readResults) { accounts.push(...result.values); errors.push(...result.errors); } accounts = this.removeDuplicates(accounts); if (errors.length > 0 && accounts.length > 0) { this.logger.warn(`The following account sources are invalid:\n${errors.join('\n')}`); await delay(1000); const { confirm } = await inquirer.prompt({ type: 'confirm', name: 'confirm', message: 'Continue with the valid accounts?', default: false, }); if (!confirm) throw new Error('Aborted by user'); } return accounts; } public assignSecretsToAccounts(accounts: Account[], secrets: Secrets[]) { const secretsMap = new Map<string, Secrets>(); for (const secret of secrets) {
secretsMap.set(secret.username, secret);
// some existing steam-oriented apps are case-insensitive to usernames in secrets secretsMap.set(secret.username.toLowerCase(), secret); } for (const account of accounts) { let secret = secretsMap.get(account.username); if (!secret) secret = secretsMap.get(account.username.toLowerCase()); if (!secret) continue; account.sharedSecret = secret.sharedSecret; account.identitySecret = secret.identitySecret; } } private removeDuplicates(accounts: Account[]) { const map = new Map<string, Account>(); for (const account of accounts) map.set(account.username, account); return [...map.values()]; } private async readAccountsFromInput(input: string) { const inputType = await this.inferInputType(input); if (inputType === 'file') return this.readAccountsFromFile(input); if (inputType === 'string') return this.readAccountFromString(input); if (inputType === 'directory') return { values: [], errors: [input] }; } private async readAccountsFromFile(filePath: string) { const result: { values: Account[]; errors: string[] } = { values: [], errors: [] }; try { let content = await this.readFilesQueue.add(() => fs.readFile(filePath, 'utf-8')); content = content.trim(); if (content.length === 0) throw new Error('Empty file'); // session file if (filePath.endsWith('.steamsession')) { const readResults = this.readAccountFromSessionFile(content); result.values.push(...readResults.values); if (readResults.errors.length > 0) result.errors.push(filePath); return result; } // asf json if (filePath.endsWith('.json') && content.includes('"SteamLogin"')) { const readResults = this.readAccountFromAsfJson(content); result.values.push(...readResults.values); if (readResults.errors.length > 0) result.errors.push(filePath); return result; } // plain text if (content.includes(':')) { const lines = content .split(/\s+|\r?\n/) .map((l) => l.trim()) .filter((l) => l.length > 0); if (lines.length === 0) throw new Error('Empty file'); for (const line of lines) { const readResults = this.readAccountFromString(line); result.values.push(...readResults.values); if (readResults.errors.length > 0) result.errors.push(line); } return result; } throw new Error('Unsupported file format'); } catch (error) { result.errors.push(filePath); } return result; } private readAccountFromString(str: string) { const result: { values: Account[]; errors: string[] } = { values: [], errors: [] }; try { const account = new Account(str); result.values.push(account); } catch (error) { result.errors.push(str); } return result; } private readAccountFromAsfJson(fileContent: string) { const result: { values: Account[]; errors: string[] } = { values: [], errors: [] }; try { const { SteamLogin: username, SteamPassword: password } = JSON.parse(fileContent); if (!username) throw new Error('Invalid username'); if (!password) throw new Error('Invalid password'); const account = new Account(`${username}:${password}`); result.values.push(account); } catch (error) { result.errors.push(fileContent); } return result; } private readAccountFromSessionFile(fileContent: string) { const result: { values: Account[]; errors: string[] } = { values: [], errors: [] }; try { const { Username, Password, SharedSecret, IdentitySecret } = JSON.parse(fileContent); if (!Username) throw new Error('Invalid username'); if (!Password) throw new Error('Invalid password'); const account = new Account(`${Username}:${Password}:${SharedSecret || ''}:${IdentitySecret || ''}`); result.values.push(account); } catch (error) { result.errors.push(fileContent); } return result; } private async 
inferInputType(input: string) { if (typeof input !== 'string') throw new Error(`Invalid input type: '${typeof input}'`); try { const stats = await fs.stat(input); if (stats.isFile()) return 'file'; if (stats.isDirectory()) return 'directory'; } catch (error) { return 'string'; } } }
src/modules/accounts-import/accounts-import.service.ts
Sadzurami-steam-sessions-creator-97b7294
[ { "filename": "src/modules/secrets-import/secrets-import.service.ts", "retrieved_chunk": " default: false,\n });\n if (!confirm) throw new Error('Aborted by user');\n }\n return secrets;\n }\n private removeDuplicates(secrets: Secrets[]) {\n const map = new Map<string, Secrets>();\n for (const secret of secrets) map.set(secret.username, secret);\n return [...map.values()];", "score": 0.9479802846908569 }, { "filename": "src/modules/sessions-import/sessions-import.service.ts", "retrieved_chunk": " });\n if (!confirm) throw new Error('Aborted by user');\n }\n return sessions;\n }\n private removeDuplicates(sessions: Session[]) {\n const map = new Map<string, Session>();\n for (const session of sessions) map.set(session.username, session);\n return [...map.values()];\n }", "score": 0.8638043403625488 }, { "filename": "src/modules/secrets-import/secrets-import.service.ts", "retrieved_chunk": " public readonly identitySecret: string;\n constructor(secrets: string) {\n let parsedSecrets: any;\n try {\n parsedSecrets = JSON.parse(secrets);\n } catch (error) {}\n if (typeof parsedSecrets !== 'object' || parsedSecrets === null) throw new Error('Secrets string is invalid');\n const { shared_secret, identity_secret, account_name } = parsedSecrets;\n if (!shared_secret) throw new Error('Shared secret is missing');\n this.sharedSecret = shared_secret;", "score": 0.837029755115509 }, { "filename": "src/modules/proxies-import/proxies-import.service.ts", "retrieved_chunk": " if (!confirm) throw new Error('Aborted by user');\n }\n return proxies;\n }\n private removeDuplicates(proxies: Proxy[]) {\n const map = new Map<string, Proxy>();\n for (const proxy of proxies) map.set(proxy.toString(), proxy);\n return [...map.values()];\n }\n private async readProxyFromInput(input: string) {", "score": 0.8355786800384521 }, { "filename": "src/modules/secrets-import/secrets-import.service.ts", "retrieved_chunk": " public async loadSecrets(input: string[] | string) {\n if (!input) return [];\n if (!Array.isArray(input)) input = [input];\n if (input.length === 0) return [];\n let secrets: Secrets[] = [];\n const errors: string[] = [];\n const readResults = await Promise.all(input.map((input) => this.readSecretsFromInput(input)));\n for (const result of readResults) {\n secrets.push(...result.values);\n errors.push(...result.errors);", "score": 0.8344019651412964 } ]
typescript
secretsMap.set(secret.username, secret);
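The AccountsImportService row above accepts several input shapes. A sketch of calling loadAccounts directly, outside Nest's dependency injection (a simplification); the file names are placeholders.

import { AccountsImportService } from "./src/modules/accounts-import/accounts-import.service";

(async () => {
  const service = new AccountsImportService();

  // Accepts "username:password[:sharedSecret[:identitySecret]]" strings,
  // plain-text files with one such line per account, ASF-style JSON files
  // and .steamsession files, in any mix.
  const accounts = await service.loadAccounts([
    "alice:p4ssw0rd",
    "bob:p4ssw0rd:sharedSecretBase64:identitySecretBase64",
    "./accounts.txt",
    "./bob.steamsession",
  ]);

  console.log(accounts.map((a) => a.username));
})();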
import { Client } from "pg"; import { DB as DBConfig } from "../common/const"; import { Book } from "../common/types"; export default class BookModel { private readonly client: Client; constructor() { this.client = new Client({ host: DBConfig.HOST, user: DBConfig.USER, password: DBConfig.PASSWORD, database: DBConfig.DB_NAME, port: DBConfig.PORT, ssl: true }) } async init(): Promise<void> { try { await this.client.connect(); await this.client.query(`CREATE TABLE IF NOT EXISTS books ( id VARCHAR(255) UNIQUE NOT NULL, userid VARCHAR(255) UNIQUE NOT NULL, title VARCHAR(255) NOT NULL, author VARCHAR(255) NOT NULL, signature VARCHAR(255) NOT NULL, path VARCHAR(255) NOT NULL, cover VARCHAR(255) NOT NULL ) `); } catch (error) { throw error; } } async bookExists(bookid: string): Promise<boolean> { const result = await this.client.query("SELECT EXISTS (SELECT 1 FROM books WHERE id = $1)", [bookid]) return result.rows[0].exists } async getBooks(): Promise<Array<Book> | null> { try { let response = await this.client.query("SELECT * FROM books"); return response.rows; } catch (error) { console.error(error); return; } } async pushBook(book: Book): Promise<Book | null> { try { await this.client.query(` INSERT INTO books (id, userid, author, title, path, cover, signature) VALUES ($1, $2, $3, $4, $5, $6, $7)`, [book.id, book.userid,
book.author, book.title, book.path, book?.cover ?? "", book.signature] ) return book;
} catch (error) { console.error(error); return null; } } async deleteBook(bookid: string, userid?: string) { try { await this.client.query(`DELETE FROM books WHERE id = $1 ${userid && "AND userid = $2"}`, [bookid, userid ?? ""]); return bookid; } catch (error) { console.error(error); return null; } } async getBook(bookid: string, sig?: string): Promise<Book | null> { try { const response = await this.client.query(`SELECT * FROM books WHERE ${bookid ? "id = $1" : "signature = $1"}`, [bookid || sig]); return response.rows[0]; } catch (error) { console.error(error); return null; } } async updateBook() { /* TODO */ } async close(): Promise<void> { await this.client.end(); } }
src/models/BookModel.ts
Aadv1k-quillia-52c5b34
[ { "filename": "src/models/IssueModel.ts", "retrieved_chunk": " const result = await this.client.query(\"SELECT EXISTS (SELECT 1 FROM issues WHERE id = $1)\", [issueid])\n return result.rows[0].exists\n } \n async pushIssue(data: Issue): Promise<Issue | null> {\n try {\n await this.client.query(\n \"INSERT INTO issues (id, lenderid, borrowerid, bookid) VALUES ($1, $2, $3, $4)\",\n [data.id, data.lenderid, data.borrowerid, data.bookid]\n );\n return data;", "score": 0.8583102226257324 }, { "filename": "src/models/UserModel.ts", "retrieved_chunk": " await this.client.query(`DELETE FROM users WHERE id = $1 OR email = $2`, [user.id, user.email]);\n return user;\n } catch (error) {\n throw error;\n }\n }\n async pushUser(user: User): Promise<User | void> {\n try {\n await this.client.query(\"INSERT INTO users (id, email, password) VALUES ($1, $2, $3)\", [user.id, user.email, user.password]);\n return user;", "score": 0.8201563954353333 }, { "filename": "src/models/UserModel.ts", "retrieved_chunk": " const result = await this.client.query(\"SELECT EXISTS (SELECT 1 FROM users WHERE email = $1)\", [email])\n return result.rows[0].exists\n } \n async getUserByID(id: string): Promise<User | null> {\n try {\n const response = await this.client.query(\n `SELECT * FROM users \n WHERE id = $1`, \n [id]\n );", "score": 0.8054146766662598 }, { "filename": "src/models/UserModel.ts", "retrieved_chunk": " } catch (error) {\n throw error;\n }\n }\n async pushTokenForUser(token: string, userid: string): Promise<void | null> {\n try {\n await this.client.query(\"INSERT INTO tokens (userid, token) VALUES ($1, $2)\", [userid, token]);\n } catch (error) {\n console.error(error);\n return null;", "score": 0.7887616753578186 }, { "filename": "src/models/IssueModel.ts", "retrieved_chunk": " WHERE borrowerid = $1\n AND bookid = $2\n `,\n [borrowerid ?? null, bookid ?? null]\n );\n return response.rows[0];\n } catch (error) {\n console.error(error);\n return null;\n }", "score": 0.7749316692352295 } ]
typescript
book.author, book.title, book.path, book?.cover ?? "", book.signature] ) return book;
import { setTimeout as delay } from 'timers/promises'; import { Injectable, Logger } from '@nestjs/common'; import { ConfigService } from '@nestjs/config'; import { Session } from '../../interfaces/session.interface'; import { SteamTokensService } from '../../modules/steam-tokens/steam-tokens.service'; @Injectable() export class ValidateSessionsService { private readonly logger = new Logger(ValidateSessionsService.name); constructor(private readonly steamTokensService: SteamTokensService, private readonly configService: ConfigService) {} public async validateSessions(sessions: Session[]) { const valid: Session[] = []; const invalid: Session[] = []; for (const session of sessions) { const { valid: isValid, errors, expires } = await this.validateSession(session); if (isValid) { valid.push(session); this.logger.log( `Valid: ${session.username}, days: ${Math.floor((expires - Date.now()) / (24 * 60 * 60 * 1000))}`, ); } else { invalid.push(session); this.logger.warn(`Invalid: ${session.username}, errors: ${errors.join(', ')}`); } } if (invalid.length > 0) { this.logger.warn(`Invalid sessions:\n${invalid.map((session) => session.username).join('\n')}`); } await delay(1000); } private async validateSession(session: Session) { const errors: string[] = []; let expires = Date.now(); if (!session) errors.push('Invalid session'); if (session.schemaVersion !== this.configService.getOrThrow<number>('session.schemaVersion')) { errors.push('Outdated schema version'); } if (!session.username) errors.push('Invalid username'); if (!session.password) errors.push('Invalid password'); if (!session.steamId) errors.push('Invalid steamId'); if (!session.hasOwnProperty('sharedSecret')) errors.push('Invalid shared Secret'); if (!session.hasOwnProperty('identitySecret')) errors.push('Invalid identity Secret'); if (session.desktopRefreshToken) {
if (!this.steamTokensService.validateRefreshToken(session.desktopRefreshToken)) {
errors.push('Invalid desktop refresh token'); } const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.desktopRefreshToken); if (tokenExpiration > expires) expires = tokenExpiration; } if (session.mobileRefreshToken) { if (!this.steamTokensService.validateRefreshToken(session.mobileRefreshToken)) { errors.push('Invalid mobile refresh token'); } const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.mobileRefreshToken); if (tokenExpiration > expires) expires = tokenExpiration; } if (session.webRefreshToken) { if (!this.steamTokensService.validateRefreshToken(session.webRefreshToken)) { errors.push('Invalid web refresh token'); } const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.webRefreshToken); if (tokenExpiration > expires) expires = tokenExpiration; } if (expires < Date.now()) errors.push('Expired session'); return { valid: errors.length === 0, errors, expires }; } }
src/commands/validate/validate-sessions.service.ts
Sadzurami-steam-sessions-creator-97b7294
[ { "filename": "src/modules/accounts-import/accounts-import.service.ts", "retrieved_chunk": " const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };\n try {\n const { SteamLogin: username, SteamPassword: password } = JSON.parse(fileContent);\n if (!username) throw new Error('Invalid username');\n if (!password) throw new Error('Invalid password');\n const account = new Account(`${username}:${password}`);\n result.values.push(account);\n } catch (error) {\n result.errors.push(fileContent);\n }", "score": 0.7649586200714111 }, { "filename": "src/modules/accounts-import/accounts-import.service.ts", "retrieved_chunk": " return result;\n }\n private readAccountFromSessionFile(fileContent: string) {\n const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };\n try {\n const { Username, Password, SharedSecret, IdentitySecret } = JSON.parse(fileContent);\n if (!Username) throw new Error('Invalid username');\n if (!Password) throw new Error('Invalid password');\n const account = new Account(`${Username}:${Password}:${SharedSecret || ''}:${IdentitySecret || ''}`);\n result.values.push(account);", "score": 0.746411144733429 }, { "filename": "src/modules/steam-tokens/steam-tokens.service.ts", "retrieved_chunk": " this.throttleConnection(connectionId, this.connectionThrottlingTimeout);\n const loginSessionOptions = {};\n if (proxy) loginSessionOptions[proxy.protocol.includes('socks') ? 'socksProxy' : 'httpProxy'] = proxy.toString();\n const loginSession = new LoginSession(loginSessionPlatform, loginSessionOptions);\n loginSession.on('error', () => {}); // fallback errors handling\n try {\n const credentials = { accountName: account.username, password: account.password } as any;\n if (account.sharedSecret) credentials.steamGuardCode = SteamTotp.getAuthCode(account.sharedSecret);\n loginSession\n .startWithCredentials(credentials)", "score": 0.7394404411315918 }, { "filename": "src/commands/create/create-sessions.service.ts", "retrieved_chunk": " const desktopRefreshToken = await this.createRefreshToken(account, 'desktop');\n await delay(delayMs);\n const webRefreshToken = await this.createRefreshToken(account, 'web');\n await delay(delayMs);\n const mobileRefreshToken = await this.createRefreshToken(account, 'mobile');\n await delay(delayMs);\n const steamId = this.getSteamIdFromRefreshToken(webRefreshToken);\n const schemaVersion = this.schemaVersion;\n const session: ISession = {\n username: account.username,", "score": 0.7269317507743835 }, { "filename": "src/modules/accounts-import/accounts-import.service.ts", "retrieved_chunk": " const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };\n try {\n let content = await this.readFilesQueue.add(() => fs.readFile(filePath, 'utf-8'));\n content = content.trim();\n if (content.length === 0) throw new Error('Empty file');\n // session file\n if (filePath.endsWith('.steamsession')) {\n const readResults = this.readAccountFromSessionFile(content);\n result.values.push(...readResults.values);\n if (readResults.errors.length > 0) result.errors.push(filePath);", "score": 0.7259891033172607 } ]
typescript
if (!this.steamTokensService.validateRefreshToken(session.desktopRefreshToken)) {
import { setTimeout as delay } from 'timers/promises'; import { Injectable, Logger } from '@nestjs/common'; import { ConfigService } from '@nestjs/config'; import { Session } from '../../interfaces/session.interface'; import { SteamTokensService } from '../../modules/steam-tokens/steam-tokens.service'; @Injectable() export class ValidateSessionsService { private readonly logger = new Logger(ValidateSessionsService.name); constructor(private readonly steamTokensService: SteamTokensService, private readonly configService: ConfigService) {} public async validateSessions(sessions: Session[]) { const valid: Session[] = []; const invalid: Session[] = []; for (const session of sessions) { const { valid: isValid, errors, expires } = await this.validateSession(session); if (isValid) { valid.push(session); this.logger.log( `Valid: ${session.username}, days: ${Math.floor((expires - Date.now()) / (24 * 60 * 60 * 1000))}`, ); } else { invalid.push(session); this.logger.warn(`Invalid: ${session.username}, errors: ${errors.join(', ')}`); } } if (invalid.length > 0) { this.logger.warn(`Invalid sessions:\n${invalid.map((session) => session.username).join('\n')}`); } await delay(1000); } private async validateSession(session: Session) { const errors: string[] = []; let expires = Date.now(); if (!session) errors.push('Invalid session'); if (session.schemaVersion !== this.configService.getOrThrow<number>('session.schemaVersion')) { errors.push('Outdated schema version'); } if (!session.username) errors.push('Invalid username'); if (!session.password) errors.push('Invalid password'); if (!session.steamId) errors.push('Invalid steamId'); if (!session.hasOwnProperty('sharedSecret')) errors.push('Invalid shared Secret'); if (!session.hasOwnProperty('identitySecret')) errors.push('Invalid identity Secret'); if (session.desktopRefreshToken) { if (!this.steamTokensService.validateRefreshToken(session.desktopRefreshToken)) { errors.push('Invalid desktop refresh token'); } const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.desktopRefreshToken); if (tokenExpiration > expires) expires = tokenExpiration; }
if (session.mobileRefreshToken) {
if (!this.steamTokensService.validateRefreshToken(session.mobileRefreshToken)) { errors.push('Invalid mobile refresh token'); } const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.mobileRefreshToken); if (tokenExpiration > expires) expires = tokenExpiration; } if (session.webRefreshToken) { if (!this.steamTokensService.validateRefreshToken(session.webRefreshToken)) { errors.push('Invalid web refresh token'); } const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.webRefreshToken); if (tokenExpiration > expires) expires = tokenExpiration; } if (expires < Date.now()) errors.push('Expired session'); return { valid: errors.length === 0, errors, expires }; } }
src/commands/validate/validate-sessions.service.ts
Sadzurami-steam-sessions-creator-97b7294
[ { "filename": "src/commands/create/create-sessions.service.ts", "retrieved_chunk": " const desktopRefreshToken = await this.createRefreshToken(account, 'desktop');\n await delay(delayMs);\n const webRefreshToken = await this.createRefreshToken(account, 'web');\n await delay(delayMs);\n const mobileRefreshToken = await this.createRefreshToken(account, 'mobile');\n await delay(delayMs);\n const steamId = this.getSteamIdFromRefreshToken(webRefreshToken);\n const schemaVersion = this.schemaVersion;\n const session: ISession = {\n username: account.username,", "score": 0.8043437004089355 }, { "filename": "src/modules/steam-tokens/steam-tokens.service.ts", "retrieved_chunk": " this.throttleConnection(connectionId, this.connectionThrottlingTimeout);\n const loginSessionOptions = {};\n if (proxy) loginSessionOptions[proxy.protocol.includes('socks') ? 'socksProxy' : 'httpProxy'] = proxy.toString();\n const loginSession = new LoginSession(loginSessionPlatform, loginSessionOptions);\n loginSession.on('error', () => {}); // fallback errors handling\n try {\n const credentials = { accountName: account.username, password: account.password } as any;\n if (account.sharedSecret) credentials.steamGuardCode = SteamTotp.getAuthCode(account.sharedSecret);\n loginSession\n .startWithCredentials(credentials)", "score": 0.7714112997055054 }, { "filename": "src/commands/create/create-sessions.service.ts", "retrieved_chunk": " });\n } catch (error) {\n throw new Error('Failed to create refresh token', { cause: error });\n }\n }\n private getSteamIdFromRefreshToken(token: string) {\n try {\n const { sub: steamId } = this.steamTokensService.decodeRefreshToken(token);\n if (!steamId) throw new Error('SteamId is missing from refresh token');\n return steamId;", "score": 0.7543751001358032 }, { "filename": "src/modules/steam-tokens/steam-tokens.service.ts", "retrieved_chunk": " .then((result) => result.actionRequired && loginSession.emit('error', new Error('Guard action required')))\n .catch((error) => loginSession.emit('error', error));\n await pEvent(loginSession, 'authenticated', { rejectionEvents: ['error', 'timeout'], timeout: 35000 });\n const refreshToken = loginSession.refreshToken;\n if (!refreshToken) throw new Error('Refresh token is empty');\n return refreshToken;\n } catch (error) {\n if (error.eresult === EResult.RateLimitExceeded) this.throttleConnection(connectionId, 31 * 60 * 1000);\n throw new Error('Failed to create refresh token', { cause: error });\n } finally {", "score": 0.7513188123703003 }, { "filename": "src/modules/secrets-import/secrets-import.service.ts", "retrieved_chunk": " public readonly identitySecret: string;\n constructor(secrets: string) {\n let parsedSecrets: any;\n try {\n parsedSecrets = JSON.parse(secrets);\n } catch (error) {}\n if (typeof parsedSecrets !== 'object' || parsedSecrets === null) throw new Error('Secrets string is invalid');\n const { shared_secret, identity_secret, account_name } = parsedSecrets;\n if (!shared_secret) throw new Error('Shared secret is missing');\n this.sharedSecret = shared_secret;", "score": 0.7401829957962036 } ]
typescript
if (session.mobileRefreshToken) {
import { setTimeout as delay } from 'timers/promises'; import { Injectable, Logger } from '@nestjs/common'; import { ConfigService } from '@nestjs/config'; import { Session } from '../../interfaces/session.interface'; import { SteamTokensService } from '../../modules/steam-tokens/steam-tokens.service'; @Injectable() export class ValidateSessionsService { private readonly logger = new Logger(ValidateSessionsService.name); constructor(private readonly steamTokensService: SteamTokensService, private readonly configService: ConfigService) {} public async validateSessions(sessions: Session[]) { const valid: Session[] = []; const invalid: Session[] = []; for (const session of sessions) { const { valid: isValid, errors, expires } = await this.validateSession(session); if (isValid) { valid.push(session); this.logger.log( `Valid: ${session.username}, days: ${Math.floor((expires - Date.now()) / (24 * 60 * 60 * 1000))}`, ); } else { invalid.push(session); this.logger.warn(`Invalid: ${session.username}, errors: ${errors.join(', ')}`); } } if (invalid.length > 0) { this.logger.warn(`Invalid sessions:\n${invalid.map((session) => session.username).join('\n')}`); } await delay(1000); } private async validateSession(session: Session) { const errors: string[] = []; let expires = Date.now(); if (!session) errors.push('Invalid session'); if (session.schemaVersion !== this.configService.getOrThrow<number>('session.schemaVersion')) { errors.push('Outdated schema version'); } if (!session.username) errors.push('Invalid username'); if (!session.password) errors.push('Invalid password'); if (!session.steamId) errors.push('Invalid steamId'); if (!session.hasOwnProperty('sharedSecret')) errors.push('Invalid shared Secret'); if (!session.hasOwnProperty('identitySecret')) errors.push('Invalid identity Secret'); if (session.desktopRefreshToken) { if (!this.steamTokensService.validateRefreshToken(session.desktopRefreshToken)) { errors.push('Invalid desktop refresh token'); } const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.desktopRefreshToken); if (tokenExpiration > expires) expires = tokenExpiration; } if (session.mobileRefreshToken) { if (!this.steamTokensService.validateRefreshToken(session.mobileRefreshToken)) { errors.push('Invalid mobile refresh token'); } const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.mobileRefreshToken); if (tokenExpiration > expires) expires = tokenExpiration; }
if (session.webRefreshToken) {
if (!this.steamTokensService.validateRefreshToken(session.webRefreshToken)) { errors.push('Invalid web refresh token'); } const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.webRefreshToken); if (tokenExpiration > expires) expires = tokenExpiration; } if (expires < Date.now()) errors.push('Expired session'); return { valid: errors.length === 0, errors, expires }; } }
src/commands/validate/validate-sessions.service.ts
Sadzurami-steam-sessions-creator-97b7294
[ { "filename": "src/commands/create/create-sessions.service.ts", "retrieved_chunk": " const desktopRefreshToken = await this.createRefreshToken(account, 'desktop');\n await delay(delayMs);\n const webRefreshToken = await this.createRefreshToken(account, 'web');\n await delay(delayMs);\n const mobileRefreshToken = await this.createRefreshToken(account, 'mobile');\n await delay(delayMs);\n const steamId = this.getSteamIdFromRefreshToken(webRefreshToken);\n const schemaVersion = this.schemaVersion;\n const session: ISession = {\n username: account.username,", "score": 0.8421300649642944 }, { "filename": "src/commands/create/create-sessions.service.ts", "retrieved_chunk": " });\n } catch (error) {\n throw new Error('Failed to create refresh token', { cause: error });\n }\n }\n private getSteamIdFromRefreshToken(token: string) {\n try {\n const { sub: steamId } = this.steamTokensService.decodeRefreshToken(token);\n if (!steamId) throw new Error('SteamId is missing from refresh token');\n return steamId;", "score": 0.7942031025886536 }, { "filename": "src/modules/steam-tokens/steam-tokens.service.ts", "retrieved_chunk": " .then((result) => result.actionRequired && loginSession.emit('error', new Error('Guard action required')))\n .catch((error) => loginSession.emit('error', error));\n await pEvent(loginSession, 'authenticated', { rejectionEvents: ['error', 'timeout'], timeout: 35000 });\n const refreshToken = loginSession.refreshToken;\n if (!refreshToken) throw new Error('Refresh token is empty');\n return refreshToken;\n } catch (error) {\n if (error.eresult === EResult.RateLimitExceeded) this.throttleConnection(connectionId, 31 * 60 * 1000);\n throw new Error('Failed to create refresh token', { cause: error });\n } finally {", "score": 0.776893138885498 }, { "filename": "src/modules/steam-tokens/steam-tokens.service.ts", "retrieved_chunk": " return JSON.parse(headerJson);\n } catch (error) {\n throw new Error('An error occurred while decoding refresh token', { cause: error });\n }\n }\n public validateRefreshToken(token: string) {\n try {\n const { iss, sub, exp, aud } = this.decodeRefreshToken(token);\n if (!iss || !sub || !exp || !aud) return false;\n if (iss !== 'steam') return false;", "score": 0.7709764242172241 }, { "filename": "src/modules/steam-tokens/steam-tokens.service.ts", "retrieved_chunk": " if (exp < Math.floor(Date.now() / 1000)) return false;\n if (!aud.includes('renew')) return false;\n return true;\n } catch (error) {\n return false;\n }\n }\n public getRefreshTokenExpiration(token: string) {\n try {\n const { exp } = this.decodeRefreshToken(token);", "score": 0.7690809965133667 } ]
typescript
if (session.webRefreshToken) {
import { Client } from "pg"; import { DB as DBConfig } from "../common/const"; import { Book } from "../common/types"; export default class BookModel { private readonly client: Client; constructor() { this.client = new Client({ host: DBConfig.HOST, user: DBConfig.USER, password: DBConfig.PASSWORD, database: DBConfig.DB_NAME, port: DBConfig.PORT, ssl: true }) } async init(): Promise<void> { try { await this.client.connect(); await this.client.query(`CREATE TABLE IF NOT EXISTS books ( id VARCHAR(255) UNIQUE NOT NULL, userid VARCHAR(255) UNIQUE NOT NULL, title VARCHAR(255) NOT NULL, author VARCHAR(255) NOT NULL, signature VARCHAR(255) NOT NULL, path VARCHAR(255) NOT NULL, cover VARCHAR(255) NOT NULL ) `); } catch (error) { throw error; } } async bookExists(bookid: string): Promise<boolean> { const result = await this.client.query("SELECT EXISTS (SELECT 1 FROM books WHERE id = $1)", [bookid]) return result.rows[0].exists } async getBooks(): Promise<Array<Book> | null> { try { let response = await this.client.query("SELECT * FROM books"); return response.rows; } catch (error) { console.error(error); return; } } async pushBook(book: Book): Promise<Book | null> { try { await this.client.query(` INSERT INTO books (id, userid, author, title, path, cover, signature) VALUES ($1, $2, $3, $4, $5, $6, $7)`, [book.id, book.userid
, book.author, book.title, book.path, book?.cover ?? "", book.signature] ) return book;
} catch (error) { console.error(error); return null; } } async deleteBook(bookid: string, userid?: string) { try { await this.client.query(`DELETE FROM books WHERE id = $1 ${userid && "AND userid = $2"}`, [bookid, userid ?? ""]); return bookid; } catch (error) { console.error(error); return null; } } async getBook(bookid: string, sig?: string): Promise<Book | null> { try { const response = await this.client.query(`SELECT * FROM books WHERE ${bookid ? "id = $1" : "signature = $1"}`, [bookid || sig]); return response.rows[0]; } catch (error) { console.error(error); return null; } } async updateBook() { /* TODO */ } async close(): Promise<void> { await this.client.end(); } }
src/models/BookModel.ts
Aadv1k-quillia-52c5b34
[ { "filename": "src/models/IssueModel.ts", "retrieved_chunk": " const result = await this.client.query(\"SELECT EXISTS (SELECT 1 FROM issues WHERE id = $1)\", [issueid])\n return result.rows[0].exists\n } \n async pushIssue(data: Issue): Promise<Issue | null> {\n try {\n await this.client.query(\n \"INSERT INTO issues (id, lenderid, borrowerid, bookid) VALUES ($1, $2, $3, $4)\",\n [data.id, data.lenderid, data.borrowerid, data.bookid]\n );\n return data;", "score": 0.8623102903366089 }, { "filename": "src/models/UserModel.ts", "retrieved_chunk": " await this.client.query(`DELETE FROM users WHERE id = $1 OR email = $2`, [user.id, user.email]);\n return user;\n } catch (error) {\n throw error;\n }\n }\n async pushUser(user: User): Promise<User | void> {\n try {\n await this.client.query(\"INSERT INTO users (id, email, password) VALUES ($1, $2, $3)\", [user.id, user.email, user.password]);\n return user;", "score": 0.8198654651641846 }, { "filename": "src/models/UserModel.ts", "retrieved_chunk": " const result = await this.client.query(\"SELECT EXISTS (SELECT 1 FROM users WHERE email = $1)\", [email])\n return result.rows[0].exists\n } \n async getUserByID(id: string): Promise<User | null> {\n try {\n const response = await this.client.query(\n `SELECT * FROM users \n WHERE id = $1`, \n [id]\n );", "score": 0.8009018898010254 }, { "filename": "src/models/UserModel.ts", "retrieved_chunk": " } catch (error) {\n throw error;\n }\n }\n async pushTokenForUser(token: string, userid: string): Promise<void | null> {\n try {\n await this.client.query(\"INSERT INTO tokens (userid, token) VALUES ($1, $2)\", [userid, token]);\n } catch (error) {\n console.error(error);\n return null;", "score": 0.7850345373153687 }, { "filename": "src/models/IssueModel.ts", "retrieved_chunk": " WHERE borrowerid = $1\n AND bookid = $2\n `,\n [borrowerid ?? null, bookid ?? null]\n );\n return response.rows[0];\n } catch (error) {\n console.error(error);\n return null;\n }", "score": 0.767172634601593 } ]
typescript
, book.author, book.title, book.path, book?.cover ?? "", book.signature] ) return book;
import { Client } from "pg"; import { DB as DBConfig } from "../common/const"; import { User } from "../common/types"; export default class UserModel { private readonly client: Client; constructor() { this.client = new Client({ host: DBConfig.HOST, user: DBConfig.USER, password: DBConfig.PASSWORD, database: DBConfig.DB_NAME, port: DBConfig.PORT, ssl: true }) } async init(): Promise<void> { try { await this.client.connect(); await this.client.query(` CREATE TABLE IF NOT EXISTS users ( id VARCHAR(255) UNIQUE NOT NULL, email VARCHAR(255) NOT NULL, password VARCHAR(255) NOT NULL ) `); } catch (error) { throw error } } async userExists(email?: string): Promise<boolean> { const result = await this.client.query("SELECT EXISTS (SELECT 1 FROM users WHERE email = $1)", [email]) return result.rows[0].exists } async getUserByID(id: string): Promise<User | null> { try { const response = await this.client.query( `SELECT * FROM users WHERE id = $1`, [id] ); return response.rows[0] } catch (error) { return null; } } async getUser(email: string, id?: string): Promise<User | null> { try { const response = await this.client.query( `SELECT * FROM users WHERE email = $1 OR id = $2`, [email, id ?? ""] ); return response.rows[0] } catch (error) { return null; } } async getUsers(): Promise <Array<object> | null> { try { let data = await this.client.query(`SELECT * FROM users`) return data.rows; } catch (error) { console.error(error); return null; } } async updateUser(user: User, toUpdate: object ) { let blobs = Object.keys(toUpdate).map((e, i) => {return `${e} = \$${i+1}`}) let toUpdateQuery = blobs.join(" AND "); let query = `UPDATE users SET ${toUpdateQuery} WHERE $1 OR $2` try { this.client.query(query,
[user.id, user.email]);
} catch (error) { console.error(error); return null; } } async deleteUser(user: User): Promise<User | void> { try { await this.client.query(`DELETE FROM users WHERE id = $1 OR email = $2`, [user.id, user.email]); return user; } catch (error) { throw error; } } async pushUser(user: User): Promise<User | void> { try { await this.client.query("INSERT INTO users (id, email, password) VALUES ($1, $2, $3)", [user.id, user.email, user.password]); return user; } catch (error) { throw error; } } async pushTokenForUser(token: string, userid: string): Promise<void | null> { try { await this.client.query("INSERT INTO tokens (userid, token) VALUES ($1, $2)", [userid, token]); } catch (error) { console.error(error); return null; } } async deleteTokenForUser(token?: string, userid?: string): Promise<void | null> { try { await this.client.query("DELETE FROM tokens WHERE token = $1 OR userid = $2", [token, userid]); } catch (error) { console.error(error); return null; } } async close(): Promise<void> { await this.client.end(); } }
src/models/UserModel.ts
Aadv1k-quillia-52c5b34
[ { "filename": "src/models/BookModel.ts", "retrieved_chunk": " await this.client.query(`DELETE FROM books WHERE id = $1 ${userid && \"AND userid = $2\"}`, [bookid, userid ?? \"\"]);\n return bookid;\n } catch (error) {\n console.error(error); \n return null;\n }\n }\n async getBook(bookid: string, sig?: string): Promise<Book | null> {\n try {\n const response = await this.client.query(`SELECT * FROM books WHERE ${bookid ? \"id = $1\" : \"signature = $1\"}`, [bookid || sig]);", "score": 0.7717969417572021 }, { "filename": "src/models/BookModel.ts", "retrieved_chunk": " } catch (error) {\n console.error(error);\n return;\n }\n }\n async pushBook(book: Book): Promise<Book | null> {\n try {\n await this.client.query(`\n INSERT INTO books (id, userid, author, title, path, cover, signature) \n VALUES ($1, $2, $3, $4, $5, $6, $7)`, ", "score": 0.7713070511817932 }, { "filename": "src/routes/Signup.ts", "retrieved_chunk": " id: uuid(),\n email: parsedData.email,\n password: md5(parsedData.password),\n } \n const token = new Token();\n let pushed = await DB.pushUser(user)\n const { password, ...tokenBody} = user;\n let accessToken = token.generate(tokenBody);\n if (pushed !== null) {\n sendJsonResponse(res, {", "score": 0.767831563949585 }, { "filename": "src/models/BookModel.ts", "retrieved_chunk": " }\n }\n async bookExists(bookid: string): Promise<boolean> {\n const result = await this.client.query(\"SELECT EXISTS (SELECT 1 FROM books WHERE id = $1)\", [bookid])\n return result.rows[0].exists\n } \n async getBooks(): Promise<Array<Book> | null> {\n try {\n let response = await this.client.query(\"SELECT * FROM books\");\n return response.rows;", "score": 0.7419540882110596 }, { "filename": "src/models/IssueModel.ts", "retrieved_chunk": " WHERE borrowerid = $1\n AND bookid = $2\n `,\n [borrowerid ?? null, bookid ?? null]\n );\n return response.rows[0];\n } catch (error) {\n console.error(error);\n return null;\n }", "score": 0.7416399717330933 } ]
typescript
[user.id, user.email]);
import { Client } from "pg"; import { DB as DBConfig } from "../common/const"; import { User } from "../common/types"; export default class UserModel { private readonly client: Client; constructor() { this.client = new Client({ host: DBConfig.HOST, user: DBConfig.USER, password: DBConfig.PASSWORD, database: DBConfig.DB_NAME, port: DBConfig.PORT, ssl: true }) } async init(): Promise<void> { try { await this.client.connect(); await this.client.query(` CREATE TABLE IF NOT EXISTS users ( id VARCHAR(255) UNIQUE NOT NULL, email VARCHAR(255) NOT NULL, password VARCHAR(255) NOT NULL ) `); } catch (error) { throw error } } async userExists(email?: string): Promise<boolean> { const result = await this.client.query("SELECT EXISTS (SELECT 1 FROM users WHERE email = $1)", [email]) return result.rows[0].exists } async getUserByID(id: string): Promise<User | null> { try { const response = await this.client.query( `SELECT * FROM users WHERE id = $1`, [id] ); return response.rows[0] } catch (error) { return null; } } async getUser(email: string, id?: string): Promise<User | null> { try { const response = await this.client.query( `SELECT * FROM users WHERE email = $1 OR id = $2`, [email, id ?? ""] ); return response.rows[0] } catch (error) { return null; } } async getUsers(): Promise <Array<object> | null> { try { let data = await this.client.query(`SELECT * FROM users`) return data.rows; } catch (error) { console.error(error); return null; } } async updateUser(user: User, toUpdate: object ) { let blobs = Object.keys(toUpdate).map((e, i) => {return `${e} = \$${i+1}`}) let toUpdateQuery = blobs.join(" AND "); let query = `UPDATE users SET ${toUpdateQuery} WHERE $1 OR $2` try {
this.client.query(query, [user.id, user.email]);
} catch (error) { console.error(error); return null; } } async deleteUser(user: User): Promise<User | void> { try { await this.client.query(`DELETE FROM users WHERE id = $1 OR email = $2`, [user.id, user.email]); return user; } catch (error) { throw error; } } async pushUser(user: User): Promise<User | void> { try { await this.client.query("INSERT INTO users (id, email, password) VALUES ($1, $2, $3)", [user.id, user.email, user.password]); return user; } catch (error) { throw error; } } async pushTokenForUser(token: string, userid: string): Promise<void | null> { try { await this.client.query("INSERT INTO tokens (userid, token) VALUES ($1, $2)", [userid, token]); } catch (error) { console.error(error); return null; } } async deleteTokenForUser(token?: string, userid?: string): Promise<void | null> { try { await this.client.query("DELETE FROM tokens WHERE token = $1 OR userid = $2", [token, userid]); } catch (error) { console.error(error); return null; } } async close(): Promise<void> { await this.client.end(); } }
src/models/UserModel.ts
Aadv1k-quillia-52c5b34
[ { "filename": "src/models/BookModel.ts", "retrieved_chunk": " } catch (error) {\n console.error(error);\n return;\n }\n }\n async pushBook(book: Book): Promise<Book | null> {\n try {\n await this.client.query(`\n INSERT INTO books (id, userid, author, title, path, cover, signature) \n VALUES ($1, $2, $3, $4, $5, $6, $7)`, ", "score": 0.7882142066955566 }, { "filename": "src/models/BookModel.ts", "retrieved_chunk": " await this.client.query(`DELETE FROM books WHERE id = $1 ${userid && \"AND userid = $2\"}`, [bookid, userid ?? \"\"]);\n return bookid;\n } catch (error) {\n console.error(error); \n return null;\n }\n }\n async getBook(bookid: string, sig?: string): Promise<Book | null> {\n try {\n const response = await this.client.query(`SELECT * FROM books WHERE ${bookid ? \"id = $1\" : \"signature = $1\"}`, [bookid || sig]);", "score": 0.7872486114501953 }, { "filename": "src/routes/Signup.ts", "retrieved_chunk": " id: uuid(),\n email: parsedData.email,\n password: md5(parsedData.password),\n } \n const token = new Token();\n let pushed = await DB.pushUser(user)\n const { password, ...tokenBody} = user;\n let accessToken = token.generate(tokenBody);\n if (pushed !== null) {\n sendJsonResponse(res, {", "score": 0.7759774923324585 }, { "filename": "src/models/IssueModel.ts", "retrieved_chunk": " WHERE borrowerid = $1\n AND bookid = $2\n `,\n [borrowerid ?? null, bookid ?? null]\n );\n return response.rows[0];\n } catch (error) {\n console.error(error);\n return null;\n }", "score": 0.7575729489326477 }, { "filename": "src/models/BookModel.ts", "retrieved_chunk": " }\n }\n async bookExists(bookid: string): Promise<boolean> {\n const result = await this.client.query(\"SELECT EXISTS (SELECT 1 FROM books WHERE id = $1)\", [bookid])\n return result.rows[0].exists\n } \n async getBooks(): Promise<Array<Book> | null> {\n try {\n let response = await this.client.query(\"SELECT * FROM books\");\n return response.rows;", "score": 0.7507644295692444 } ]
typescript
this.client.query(query, [user.id, user.email]);
import BookModel from "../models/BookModel"; import Bucket from "../models/Bucket"; import Token from "../lib/GenerateToken"; import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const"; import { TokStatus, Book } from "../common/types"; import { sendJsonResponse, parseSimplePostData, md5, uuid, } from "../common/utils"; import filetype from "file-type-cjs"; import fs from "node:fs"; import EPub from "epub"; import os from "node:os"; import path from "node:path"; import crypto from "node:crypto"; import { exec } from "node:child_process"; import http from "node:http"; async function getEpubCoverFromEpubFile_UNIX( epubFilepath: string ): Promise<[Buffer, string] | null> { let randomString = crypto.randomBytes(16).toString("hex"); let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`); fs.mkdirSync(tempDir); let unzipCMD = `unzip -q ${epubFilepath} -d ${tempDir}`; let unzipCMDExec = new Promise((resolve, reject) => { exec(unzipCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); try { await unzipCMDExec; } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`; let findCMDExec: Promise<string> = new Promise((resolve, reject) => { exec(findCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); let selectedFilePath: string; try { selectedFilePath = await findCMDExec; selectedFilePath = selectedFilePath.trim(); } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let ret: [Buffer, string] = [ Buffer.from(fs.readFileSync(selectedFilePath)), selectedFilePath, ]; fs.rmSync(tempDir, { recursive: true }); // we r good boys! return ret; } export default async function ( req: http.IncomingMessage, res: http.ServerResponse ) { const BOOK_DB = new BookModel(); const BUCKET = new Bucket(); await BOOK_DB.init(); await BUCKET.init(); try { if (req.method === "GET") { try { let userBooks = await BOOK_DB.getBooks(); userBooks = userBooks.map((e) => { delete e.path; return e; }); sendJsonResponse(res, userBooks, 200); } catch (error) { console.error(error);
sendJsonResponse(res, ERROR.internalErr);
} } else if (req.method === "POST") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let epubBuffer: Buffer; epubBuffer = await parseSimplePostData(req); let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6); let bufferMime = await filetype.fromBuffer(epubBuffer); if (bufferMime.mime != "application/epub+zip") { sendJsonResponse(res, ERROR.invalidMimeForResource, 415); return; } if (epubSizeInMB > MAX_EPUB_SIZE_MB) { sendJsonResponse(res, ERROR.fileTooLarge, 400); return; } let randomString = crypto.randomBytes(16).toString("hex"); const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`); fs.writeFileSync(tempEpubFilePath, epubBuffer); const epub: any = await new Promise((resolve, reject) => { const epub = new EPub(tempEpubFilePath); epub.on("end", () => resolve(epub)); epub.on("error", reject); epub.parse(); }); let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath); console.log(epubCoverBuffer); let epubSignature = md5(epubBuffer.toString("hex")); let foundBook = await BOOK_DB.getBook("", epubSignature); if (foundBook) { sendJsonResponse( res, { ...ERROR.resourceExists, data: { id: foundBook.id, }, }, 409 ); return; } let epubFilePermalink = await BUCKET.pushBufferWithName( epubBuffer, `${epubSignature}.epub` ); let epubCoverPermalink = null; if (epubCoverBuffer) { epubCoverPermalink = await BUCKET.pushBufferWithName( epubCoverBuffer[0], `${epubSignature}.${epubCoverBuffer[1].split(".").pop()}` ); } let epubID = uuid(); let epubEntry: Book = { id: epubID, userid: parsedAuthToken.id, title: epub.metadata?.title ?? epubID.split("-").pop(), author: epub.metadata?.creator ?? parsedAuthToken.email, path: epubFilePermalink, signature: epubSignature, cover: epubCoverPermalink, }; const pushed = await BOOK_DB.pushBook(epubEntry); if (!pushed) { sendJsonResponse(res, ERROR.internalErr, 500); return; } sendJsonResponse( res, { error: null, message: `successfully published a book of id ${epubEntry.id}`, data: { id: epubEntry.id, }, }, 201 ); } else if (req.method === "DELETE") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let body: Buffer; body = await parseSimplePostData(req); let data: any; try { data = JSON.parse(body.toString()); } catch { sendJsonResponse(res, ERROR.invalidJSONData, 400); return; } if (!data.bookid) { sendJsonResponse(res, ERROR.badRequest, 400); return; } let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id); if (!bookDeleted) { sendJsonResponse(res, { error: "unable-to-delete-book", message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`, status: 404 }, 404) return; } sendJsonResponse(res, { error: null, message: `successfully deleted book of id ${data.bookid}`, status: 204, data: { id: data.bookid, } }, 204) } } finally { await BOOK_DB.close(); } }
src/routes/Books.ts
Aadv1k-quillia-52c5b34
[ { "filename": "src/routes/Login.ts", "retrieved_chunk": " } catch(error) {\n sendJsonResponse(res, ERROR.invalidJSONData, 400)\n return;\n }\n await DB.init();\n const foundUser: User = await DB.getUser(parsedData.email);\n await DB.close();\n if (!foundUser) {\n sendJsonResponse(res, ERROR.userNotFound, 404);\n return;", "score": 0.8517471551895142 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " let targetBook = await BOOK_DB.getBook(requestedBook);\n if (!targetBook) {\n sendJsonResponse(res, ERROR.resourceNotExists, 404);\n return;\n }\n let epubResourcePath = targetBook.path;\n const response: Array<Buffer> = await new Promise((resolve, reject) => {\n https.get(epubResourcePath, (res) => {\n let data: Array<Buffer> = [];\n res.on(\"data\", (d: Buffer) => data.push(d));", "score": 0.8342055082321167 }, { "filename": "src/routes/Signup.ts", "retrieved_chunk": " sendJsonResponse(res, ERROR.badRequest, 400);\n return;\n }\n await DB.init();\n let foundUser = await DB.getUser(parsedData.email);\n if (foundUser) {\n sendJsonResponse(res, ERROR.userAlreadyExists, 409)\n return;\n }\n let user: User = {", "score": 0.8307185173034668 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " let issueData: Issue;\n try {\n let issuePostData = await parseSimplePostData(req);\n issueData = JSON.parse(issuePostData.toString());\n } catch (error) {\n console.error(error);\n sendJsonResponse(res, ERROR.badRequest, 400);\n return;\n }\n if (!issueData.lenderid || !issueData.bookid) {", "score": 0.8255387544631958 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " res.on(\"end\", () => resolve(data));\n res.on(\"error\", (error) => reject(error));\n });\n });\n let epubBuffer = Buffer.concat(response);\n sendEpubResponse(res, epubBuffer);\n return;\n } else {\n let userIssues = await ISSUE_DB.getIssues(parsedAuthToken.id);\n if (!userIssues) {", "score": 0.8254587650299072 } ]
typescript
sendJsonResponse(res, ERROR.internalErr);
import BookModel from "../models/BookModel"; import Bucket from "../models/Bucket"; import Token from "../lib/GenerateToken"; import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const"; import { TokStatus, Book } from "../common/types"; import { sendJsonResponse, parseSimplePostData, md5, uuid, } from "../common/utils"; import filetype from "file-type-cjs"; import fs from "node:fs"; import EPub from "epub"; import os from "node:os"; import path from "node:path"; import crypto from "node:crypto"; import { exec } from "node:child_process"; import http from "node:http"; async function getEpubCoverFromEpubFile_UNIX( epubFilepath: string ): Promise<[Buffer, string] | null> { let randomString = crypto.randomBytes(16).toString("hex"); let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`); fs.mkdirSync(tempDir); let unzipCMD = `unzip -q ${epubFilepath} -d ${tempDir}`; let unzipCMDExec = new Promise((resolve, reject) => { exec(unzipCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); try { await unzipCMDExec; } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`; let findCMDExec: Promise<string> = new Promise((resolve, reject) => { exec(findCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); let selectedFilePath: string; try { selectedFilePath = await findCMDExec; selectedFilePath = selectedFilePath.trim(); } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let ret: [Buffer, string] = [ Buffer.from(fs.readFileSync(selectedFilePath)), selectedFilePath, ]; fs.rmSync(tempDir, { recursive: true }); // we r good boys! return ret; } export default async function ( req: http.IncomingMessage, res: http.ServerResponse ) { const BOOK_DB = new BookModel(); const BUCKET = new Bucket(); await BOOK_DB.init(); await BUCKET.init(); try { if (req.method === "GET") { try { let userBooks = await BOOK_DB.getBooks(); userBooks = userBooks.map((e) => { delete e.path; return e; }); sendJsonResponse(res, userBooks, 200); } catch (error) { console.error(error); sendJsonResponse(res, ERROR.internalErr); } } else if (req.method === "POST") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus ===
TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) {
sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let epubBuffer: Buffer; epubBuffer = await parseSimplePostData(req); let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6); let bufferMime = await filetype.fromBuffer(epubBuffer); if (bufferMime.mime != "application/epub+zip") { sendJsonResponse(res, ERROR.invalidMimeForResource, 415); return; } if (epubSizeInMB > MAX_EPUB_SIZE_MB) { sendJsonResponse(res, ERROR.fileTooLarge, 400); return; } let randomString = crypto.randomBytes(16).toString("hex"); const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`); fs.writeFileSync(tempEpubFilePath, epubBuffer); const epub: any = await new Promise((resolve, reject) => { const epub = new EPub(tempEpubFilePath); epub.on("end", () => resolve(epub)); epub.on("error", reject); epub.parse(); }); let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath); console.log(epubCoverBuffer); let epubSignature = md5(epubBuffer.toString("hex")); let foundBook = await BOOK_DB.getBook("", epubSignature); if (foundBook) { sendJsonResponse( res, { ...ERROR.resourceExists, data: { id: foundBook.id, }, }, 409 ); return; } let epubFilePermalink = await BUCKET.pushBufferWithName( epubBuffer, `${epubSignature}.epub` ); let epubCoverPermalink = null; if (epubCoverBuffer) { epubCoverPermalink = await BUCKET.pushBufferWithName( epubCoverBuffer[0], `${epubSignature}.${epubCoverBuffer[1].split(".").pop()}` ); } let epubID = uuid(); let epubEntry: Book = { id: epubID, userid: parsedAuthToken.id, title: epub.metadata?.title ?? epubID.split("-").pop(), author: epub.metadata?.creator ?? parsedAuthToken.email, path: epubFilePermalink, signature: epubSignature, cover: epubCoverPermalink, }; const pushed = await BOOK_DB.pushBook(epubEntry); if (!pushed) { sendJsonResponse(res, ERROR.internalErr, 500); return; } sendJsonResponse( res, { error: null, message: `successfully published a book of id ${epubEntry.id}`, data: { id: epubEntry.id, }, }, 201 ); } else if (req.method === "DELETE") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let body: Buffer; body = await parseSimplePostData(req); let data: any; try { data = JSON.parse(body.toString()); } catch { sendJsonResponse(res, ERROR.invalidJSONData, 400); return; } if (!data.bookid) { sendJsonResponse(res, ERROR.badRequest, 400); return; } let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id); if (!bookDeleted) { sendJsonResponse(res, { error: "unable-to-delete-book", message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`, status: 404 }, 404) return; } sendJsonResponse(res, { error: null, message: `successfully deleted book of id ${data.bookid}`, status: 204, data: { id: data.bookid, } }, 204) } } finally { await BOOK_DB.close(); } }
src/routes/Books.ts
Aadv1k-quillia-52c5b34
[ { "filename": "src/routes/Issue.ts", "retrieved_chunk": " sendJsonResponse(res, ERROR.unauthorized, 401);\n return;\n }\n const token = new Token();\n const tokenStatus: TokStatus = token.verify(authToken);\n if (\n tokenStatus === TokStatus.INVALID ||\n tokenStatus === TokStatus.INVALID_SIG\n ) {\n sendJsonResponse(res, ERROR.unauthorized, 401);", "score": 0.9779242873191833 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " const BOOK_DB = new BookModel();\n const USER_DB = new UserModel();\n const authorization = req.headers?.authorization;\n const authToken = authorization?.split(\" \")?.pop()?.trim();\n try {\n if (req.method === \"OPTIONS\") {\n sendJsonResponse(res, {}, 200);\n return;\n }\n if (!authorization || !authToken) {", "score": 0.8165160417556763 }, { "filename": "src/routes/Login.ts", "retrieved_chunk": " }\n if (md5(parsedData.password) !== foundUser.password) {\n sendJsonResponse(res, ERROR.unauthorized, 401);\n return;\n }\n const token = new Token();\n const { password, ...tokenBody} = foundUser;\n let accessToken = token.generate(tokenBody);\n sendJsonResponse(res, {\n messaged: \"found the given user\",", "score": 0.8091861009597778 }, { "filename": "src/routes/Signup.ts", "retrieved_chunk": ") {\n const DB = new UserModel();\n if (req.method !== \"POST\") {\n sendJsonResponse(res, ERROR.methodNotAllowed, 405);\n return;\n }\n let data: any = await parseSimplePostData(req);\n data = data.toString();\n let parsedData: User;\n try {", "score": 0.8007282018661499 }, { "filename": "src/models/UserModel.ts", "retrieved_chunk": " }\n }\n async deleteTokenForUser(token?: string, userid?: string): Promise<void | null> {\n try {\n await this.client.query(\"DELETE FROM tokens WHERE token = $1 OR userid = $2\", [token, userid]);\n } catch (error) {\n console.error(error);\n return null;\n }\n }", "score": 0.7948453426361084 } ]
typescript
TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) {
import fs from 'fs/promises'; import inquirer from 'inquirer'; import pQueue from 'p-queue'; import { setTimeout as delay } from 'timers/promises'; import { Injectable, Logger } from '@nestjs/common'; import { Account as IAccount } from '../../interfaces/account.interface'; import { Secrets } from '../../interfaces/secrets.interface'; class Account implements IAccount { public readonly username: string; public readonly password: string; public sharedSecret: string | null = null; public identitySecret: string | null = null; constructor(account: string) { account = account.trim(); if (account.length === 0) throw new Error('Invalid account'); const parts = account.split(':').map((part) => part.trim()); if (parts.length < 2) throw new Error('Invalid account'); const [username, password, sharedSecret, identitySecret] = parts; this.username = username; this.password = password; if (sharedSecret) this.sharedSecret = sharedSecret; if (identitySecret) this.identitySecret = identitySecret; } } @Injectable() export class AccountsImportService { private readonly logger = new Logger(AccountsImportService.name); private readonly readFilesQueue = new pQueue({ concurrency: 100 }); public async loadAccounts(input: string[] | string) { if (!input) return []; if (!Array.isArray(input)) input = [input]; if (input.length === 0) return []; let accounts: Account[] = []; const errors: string[] = []; const readResults = await Promise.all(input.map((input) => this.readAccountsFromInput(input))); for (const result of readResults) { accounts.push(...result.values); errors.push(...result.errors); } accounts = this.removeDuplicates(accounts); if (errors.length > 0 && accounts.length > 0) { this.logger.warn(`The following account sources are invalid:\n${errors.join('\n')}`); await delay(1000); const { confirm } = await inquirer.prompt({ type: 'confirm', name: 'confirm', message: 'Continue with the valid accounts?', default: false, }); if (!confirm) throw new Error('Aborted by user'); } return accounts; } public assignSecretsToAccounts(accounts: Account[], secrets: Secrets[]) { const secretsMap = new Map<string, Secrets>(); for (const secret of secrets) { secretsMap.set(secret.username, secret); // some existing steam-oriented apps are case-insensitive to usernames in secrets secretsMap.set(secret.username.toLowerCase(), secret); } for (const account of accounts) { let secret = secretsMap.get(account.username); if (!secret) secret = secretsMap.get(account.username.toLowerCase()); if (!secret) continue; account.sharedSecret = secret.sharedSecret;
account.identitySecret = secret.identitySecret;
} } private removeDuplicates(accounts: Account[]) { const map = new Map<string, Account>(); for (const account of accounts) map.set(account.username, account); return [...map.values()]; } private async readAccountsFromInput(input: string) { const inputType = await this.inferInputType(input); if (inputType === 'file') return this.readAccountsFromFile(input); if (inputType === 'string') return this.readAccountFromString(input); if (inputType === 'directory') return { values: [], errors: [input] }; } private async readAccountsFromFile(filePath: string) { const result: { values: Account[]; errors: string[] } = { values: [], errors: [] }; try { let content = await this.readFilesQueue.add(() => fs.readFile(filePath, 'utf-8')); content = content.trim(); if (content.length === 0) throw new Error('Empty file'); // session file if (filePath.endsWith('.steamsession')) { const readResults = this.readAccountFromSessionFile(content); result.values.push(...readResults.values); if (readResults.errors.length > 0) result.errors.push(filePath); return result; } // asf json if (filePath.endsWith('.json') && content.includes('"SteamLogin"')) { const readResults = this.readAccountFromAsfJson(content); result.values.push(...readResults.values); if (readResults.errors.length > 0) result.errors.push(filePath); return result; } // plain text if (content.includes(':')) { const lines = content .split(/\s+|\r?\n/) .map((l) => l.trim()) .filter((l) => l.length > 0); if (lines.length === 0) throw new Error('Empty file'); for (const line of lines) { const readResults = this.readAccountFromString(line); result.values.push(...readResults.values); if (readResults.errors.length > 0) result.errors.push(line); } return result; } throw new Error('Unsupported file format'); } catch (error) { result.errors.push(filePath); } return result; } private readAccountFromString(str: string) { const result: { values: Account[]; errors: string[] } = { values: [], errors: [] }; try { const account = new Account(str); result.values.push(account); } catch (error) { result.errors.push(str); } return result; } private readAccountFromAsfJson(fileContent: string) { const result: { values: Account[]; errors: string[] } = { values: [], errors: [] }; try { const { SteamLogin: username, SteamPassword: password } = JSON.parse(fileContent); if (!username) throw new Error('Invalid username'); if (!password) throw new Error('Invalid password'); const account = new Account(`${username}:${password}`); result.values.push(account); } catch (error) { result.errors.push(fileContent); } return result; } private readAccountFromSessionFile(fileContent: string) { const result: { values: Account[]; errors: string[] } = { values: [], errors: [] }; try { const { Username, Password, SharedSecret, IdentitySecret } = JSON.parse(fileContent); if (!Username) throw new Error('Invalid username'); if (!Password) throw new Error('Invalid password'); const account = new Account(`${Username}:${Password}:${SharedSecret || ''}:${IdentitySecret || ''}`); result.values.push(account); } catch (error) { result.errors.push(fileContent); } return result; } private async inferInputType(input: string) { if (typeof input !== 'string') throw new Error(`Invalid input type: '${typeof input}'`); try { const stats = await fs.stat(input); if (stats.isFile()) return 'file'; if (stats.isDirectory()) return 'directory'; } catch (error) { return 'string'; } } }
src/modules/accounts-import/accounts-import.service.ts
Sadzurami-steam-sessions-creator-97b7294
[ { "filename": "src/modules/secrets-import/secrets-import.service.ts", "retrieved_chunk": " default: false,\n });\n if (!confirm) throw new Error('Aborted by user');\n }\n return secrets;\n }\n private removeDuplicates(secrets: Secrets[]) {\n const map = new Map<string, Secrets>();\n for (const secret of secrets) map.set(secret.username, secret);\n return [...map.values()];", "score": 0.7793920040130615 }, { "filename": "src/modules/secrets-import/secrets-import.service.ts", "retrieved_chunk": " public readonly identitySecret: string;\n constructor(secrets: string) {\n let parsedSecrets: any;\n try {\n parsedSecrets = JSON.parse(secrets);\n } catch (error) {}\n if (typeof parsedSecrets !== 'object' || parsedSecrets === null) throw new Error('Secrets string is invalid');\n const { shared_secret, identity_secret, account_name } = parsedSecrets;\n if (!shared_secret) throw new Error('Shared secret is missing');\n this.sharedSecret = shared_secret;", "score": 0.7709853053092957 }, { "filename": "src/modules/steam-tokens/steam-tokens.service.ts", "retrieved_chunk": " this.throttleConnection(connectionId, this.connectionThrottlingTimeout);\n const loginSessionOptions = {};\n if (proxy) loginSessionOptions[proxy.protocol.includes('socks') ? 'socksProxy' : 'httpProxy'] = proxy.toString();\n const loginSession = new LoginSession(loginSessionPlatform, loginSessionOptions);\n loginSession.on('error', () => {}); // fallback errors handling\n try {\n const credentials = { accountName: account.username, password: account.password } as any;\n if (account.sharedSecret) credentials.steamGuardCode = SteamTotp.getAuthCode(account.sharedSecret);\n loginSession\n .startWithCredentials(credentials)", "score": 0.7554628252983093 }, { "filename": "src/commands/create/create-sessions.service.ts", "retrieved_chunk": " const desktopRefreshToken = await this.createRefreshToken(account, 'desktop');\n await delay(delayMs);\n const webRefreshToken = await this.createRefreshToken(account, 'web');\n await delay(delayMs);\n const mobileRefreshToken = await this.createRefreshToken(account, 'mobile');\n await delay(delayMs);\n const steamId = this.getSteamIdFromRefreshToken(webRefreshToken);\n const schemaVersion = this.schemaVersion;\n const session: ISession = {\n username: account.username,", "score": 0.7475252151489258 }, { "filename": "src/modules/secrets-import/secrets-import.service.ts", "retrieved_chunk": " if (!identity_secret) throw new Error('Identity secret is missing');\n this.identitySecret = identity_secret;\n if (!account_name) throw new Error('Account name is missing');\n this.username = account_name;\n }\n}\n@Injectable()\nexport class SecretsImportService {\n private readonly logger = new Logger(SecretsImportService.name);\n private readonly readFilesQueue = new pQueue({ concurrency: 100 });", "score": 0.725435197353363 } ]
typescript
account.identitySecret = secret.identitySecret;
import BookModel from "../models/BookModel"; import Bucket from "../models/Bucket"; import Token from "../lib/GenerateToken"; import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const"; import { TokStatus, Book } from "../common/types"; import { sendJsonResponse, parseSimplePostData, md5, uuid, } from "../common/utils"; import filetype from "file-type-cjs"; import fs from "node:fs"; import EPub from "epub"; import os from "node:os"; import path from "node:path"; import crypto from "node:crypto"; import { exec } from "node:child_process"; import http from "node:http"; async function getEpubCoverFromEpubFile_UNIX( epubFilepath: string ): Promise<[Buffer, string] | null> { let randomString = crypto.randomBytes(16).toString("hex"); let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`); fs.mkdirSync(tempDir); let unzipCMD = `unzip -q ${epubFilepath} -d ${tempDir}`; let unzipCMDExec = new Promise((resolve, reject) => { exec(unzipCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); try { await unzipCMDExec; } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`; let findCMDExec: Promise<string> = new Promise((resolve, reject) => { exec(findCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); let selectedFilePath: string; try { selectedFilePath = await findCMDExec; selectedFilePath = selectedFilePath.trim(); } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let ret: [Buffer, string] = [ Buffer.from(fs.readFileSync(selectedFilePath)), selectedFilePath, ]; fs.rmSync(tempDir, { recursive: true }); // we r good boys! return ret; } export default async function ( req: http.IncomingMessage, res: http.ServerResponse ) { const BOOK_DB = new BookModel(); const BUCKET = new Bucket(); await BOOK_DB.init(); await BUCKET.init(); try { if (req.method === "GET") { try {
let userBooks = await BOOK_DB.getBooks();
userBooks = userBooks.map((e) => { delete e.path; return e; }); sendJsonResponse(res, userBooks, 200); } catch (error) { console.error(error); sendJsonResponse(res, ERROR.internalErr); } } else if (req.method === "POST") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let epubBuffer: Buffer; epubBuffer = await parseSimplePostData(req); let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6); let bufferMime = await filetype.fromBuffer(epubBuffer); if (bufferMime.mime != "application/epub+zip") { sendJsonResponse(res, ERROR.invalidMimeForResource, 415); return; } if (epubSizeInMB > MAX_EPUB_SIZE_MB) { sendJsonResponse(res, ERROR.fileTooLarge, 400); return; } let randomString = crypto.randomBytes(16).toString("hex"); const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`); fs.writeFileSync(tempEpubFilePath, epubBuffer); const epub: any = await new Promise((resolve, reject) => { const epub = new EPub(tempEpubFilePath); epub.on("end", () => resolve(epub)); epub.on("error", reject); epub.parse(); }); let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath); console.log(epubCoverBuffer); let epubSignature = md5(epubBuffer.toString("hex")); let foundBook = await BOOK_DB.getBook("", epubSignature); if (foundBook) { sendJsonResponse( res, { ...ERROR.resourceExists, data: { id: foundBook.id, }, }, 409 ); return; } let epubFilePermalink = await BUCKET.pushBufferWithName( epubBuffer, `${epubSignature}.epub` ); let epubCoverPermalink = null; if (epubCoverBuffer) { epubCoverPermalink = await BUCKET.pushBufferWithName( epubCoverBuffer[0], `${epubSignature}.${epubCoverBuffer[1].split(".").pop()}` ); } let epubID = uuid(); let epubEntry: Book = { id: epubID, userid: parsedAuthToken.id, title: epub.metadata?.title ?? epubID.split("-").pop(), author: epub.metadata?.creator ?? 
parsedAuthToken.email, path: epubFilePermalink, signature: epubSignature, cover: epubCoverPermalink, }; const pushed = await BOOK_DB.pushBook(epubEntry); if (!pushed) { sendJsonResponse(res, ERROR.internalErr, 500); return; } sendJsonResponse( res, { error: null, message: `successfully published a book of id ${epubEntry.id}`, data: { id: epubEntry.id, }, }, 201 ); } else if (req.method === "DELETE") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let body: Buffer; body = await parseSimplePostData(req); let data: any; try { data = JSON.parse(body.toString()); } catch { sendJsonResponse(res, ERROR.invalidJSONData, 400); return; } if (!data.bookid) { sendJsonResponse(res, ERROR.badRequest, 400); return; } let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id); if (!bookDeleted) { sendJsonResponse(res, { error: "unable-to-delete-book", message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`, status: 404 }, 404) return; } sendJsonResponse(res, { error: null, message: `successfully deleted book of id ${data.bookid}`, status: 204, data: { id: data.bookid, } }, 204) } } finally { await BOOK_DB.close(); } }
src/routes/Books.ts
Aadv1k-quillia-52c5b34
[ { "filename": "src/routes/Issue.ts", "retrieved_chunk": " return;\n }\n await ISSUE_DB.init();\n await BOOK_DB.init();\n await USER_DB.init();\n const parsedAuthToken: any = token.UNSAFE_parse(authToken);\n if (req.method === \"GET\") {\n let URLParams = req.url.split(\"/\").slice(3);\n let requestedBook = URLParams?.[0];\n if (requestedBook) {", "score": 0.8291519284248352 }, { "filename": "src/models/BookModel.ts", "retrieved_chunk": " database: DBConfig.DB_NAME,\n port: DBConfig.PORT,\n ssl: true\n })\n }\n async init(): Promise<void> {\n try {\n await this.client.connect();\n await this.client.query(`CREATE TABLE IF NOT EXISTS books (\n id VARCHAR(255) UNIQUE NOT NULL,", "score": 0.7925185561180115 }, { "filename": "src/models/UserModel.ts", "retrieved_chunk": " database: DBConfig.DB_NAME,\n port: DBConfig.PORT,\n ssl: true\n })\n }\n async init(): Promise<void> {\n try {\n await this.client.connect();\n await this.client.query(`\n CREATE TABLE IF NOT EXISTS users (", "score": 0.78165203332901 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " const BOOK_DB = new BookModel();\n const USER_DB = new UserModel();\n const authorization = req.headers?.authorization;\n const authToken = authorization?.split(\" \")?.pop()?.trim();\n try {\n if (req.method === \"OPTIONS\") {\n sendJsonResponse(res, {}, 200);\n return;\n }\n if (!authorization || !authToken) {", "score": 0.7652073502540588 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " },\n },\n 201\n );\n }\n } finally {\n await ISSUE_DB.close();\n await BOOK_DB.close();\n await USER_DB.close();\n }", "score": 0.7644737958908081 } ]
typescript
let userBooks = await BOOK_DB.getBooks();
import fs from 'fs/promises'; import inquirer from 'inquirer'; import { setTimeout as delay } from 'timers/promises'; import { Injectable, Logger } from '@nestjs/common'; import { Proxy as IProxy } from '../../interfaces/proxy.interface'; class Proxy implements IProxy { public readonly host: string; public readonly port: number; public readonly protocol: string; public readonly auth?: { username: string; password: string }; constructor(proxy: string) { if (!/^(https?|socks5?):\/\/([-\w:@.^&]+)$/.test(proxy)) throw new Error('Invalid proxy'); const url = new URL(proxy); this.host = url.hostname; this.port = Number(url.port); this.protocol = url.protocol.replace(/:$/, ''); if (url.username.length > 0 && url.password.length > 0) { this.auth = { username: url.username, password: url.password }; } } public toString() { return this.auth ? `${this.protocol}://${this.auth.username}:${this.auth.password}@${this.host}:${this.port}` : `${this.protocol}://${this.host}:${this.port}`; } } @Injectable() export class ProxiesImportService { private readonly logger = new Logger(ProxiesImportService.name); public async loadProxies(input: string[] | string) { if (!input) return []; if (!Array.isArray(input)) input = [input]; if (input.length === 0) return []; let proxies: Proxy[] = []; const errors: string[] = []; const readResults = await Promise.all(input.map((input) => this.readProxyFromInput(input))); for (const result of readResults) { proxies.push(...result.values); errors.push(...result.errors); } proxies = this.removeDuplicates(proxies); if (errors.length > 0) { this.logger.warn(`The following proxy sources are invalid:\n${errors.join('\n')}`); await delay(1000); const { confirm } = await inquirer.prompt({ type: 'confirm', name: 'confirm', message: proxies.length > 0 ? `Continue with ${proxies.length} valid proxies ?` : 'Continue without proxies ?', default: false, }); if (!confirm) throw new Error('Aborted by user'); } return proxies; } private removeDuplicates(proxies: Proxy[]) { const map = new Map<string, Proxy>(); for (const proxy of proxies) map.set(proxy.toString(), proxy); return [...map.values()]; } private async readProxyFromInput(input: string) { const inputType = await this.inferInputType(input); if (inputType === 'file') return await this.readProxyFromFile(input); if (inputType === 'string') return this.readProxyFromString(input); if (inputType === 'directory') return { values: [], errors: [input] }; } private readProxyFromString(str: string) { const result: { values: Proxy[]; errors: string[] } = { values: [], errors: [] }; try {
const proxy = new Proxy(str);
result.values.push(proxy); } catch (error) { result.errors.push(str); } return result; } private async readProxyFromFile(path: string) { const result: { values: Proxy[]; errors: string[] } = { values: [], errors: [] }; try { const file = await fs.readFile(path, 'utf8'); const lines = file .split(/\s+|\r?\n/) .map((line) => line.trim()) .filter((line) => line.length > 0); if (lines.length === 0) throw new Error(`File '${path}' is empty`); for (const line of lines) { const { values, errors } = this.readProxyFromString(line); result.values.push(...values); result.errors.push(...errors); } } catch (error) { result.errors.push(path); } return result; } private async inferInputType(input: string) { if (typeof input !== 'string') throw new Error(`Invalid input type: '${typeof input}'`); try { const stats = await fs.stat(input); if (stats.isFile()) return 'file'; if (stats.isDirectory()) return 'directory'; } catch (error) { return 'string'; } } }
src/modules/proxies-import/proxies-import.service.ts
Sadzurami-steam-sessions-creator-97b7294
[ { "filename": "src/modules/sessions-import/sessions-import.service.ts", "retrieved_chunk": " private async readSessionsFromInput(input: string) {\n const inputType = await this.inferInputType(input);\n if (inputType === 'file') return this.readSessionFromFile(input);\n if (inputType === 'string') return { values: [], errors: [input] };\n if (inputType === 'directory') return { values: [], errors: [input] };\n }\n private async readSessionFromFile(filePath: string) {\n const result: { values: Session[]; errors: string[] } = { values: [], errors: [] };\n try {\n let content = await this.readFilesQueue.add(() => fs.readFile(filePath, 'utf-8'));", "score": 0.941750705242157 }, { "filename": "src/modules/secrets-import/secrets-import.service.ts", "retrieved_chunk": " }\n private async readSecretsFromInput(input: string) {\n const inputType = await this.inferInputType(input);\n if (inputType === 'file') return this.readSecretsFromFile(input);\n if (inputType === 'string') return { values: [], errors: [input] };\n if (inputType === 'directory') return { values: [], errors: [input] };\n }\n private async readSecretsFromFile(filePath: string) {\n const result: { values: Secrets[]; errors: string[] } = { values: [], errors: [] };\n try {", "score": 0.9369098544120789 }, { "filename": "src/modules/accounts-import/accounts-import.service.ts", "retrieved_chunk": " for (const account of accounts) map.set(account.username, account);\n return [...map.values()];\n }\n private async readAccountsFromInput(input: string) {\n const inputType = await this.inferInputType(input);\n if (inputType === 'file') return this.readAccountsFromFile(input);\n if (inputType === 'string') return this.readAccountFromString(input);\n if (inputType === 'directory') return { values: [], errors: [input] };\n }\n private async readAccountsFromFile(filePath: string) {", "score": 0.9035225510597229 }, { "filename": "src/modules/accounts-import/accounts-import.service.ts", "retrieved_chunk": " } catch (error) {\n result.errors.push(fileContent);\n }\n return result;\n }\n private async inferInputType(input: string) {\n if (typeof input !== 'string') throw new Error(`Invalid input type: '${typeof input}'`);\n try {\n const stats = await fs.stat(input);\n if (stats.isFile()) return 'file';", "score": 0.8741997480392456 }, { "filename": "src/modules/secrets-import/secrets-import.service.ts", "retrieved_chunk": " const secrets = new Secrets(content);\n result.values.push(secrets);\n } catch (error) {\n result.errors.push(filePath);\n }\n return result;\n }\n private async inferInputType(input: string) {\n if (typeof input !== 'string') throw new Error(`Invalid input type: '${typeof input}'`);\n try {", "score": 0.8732241988182068 } ]
typescript
const proxy = new Proxy(str);
import BookModel from "../models/BookModel"; import Bucket from "../models/Bucket"; import Token from "../lib/GenerateToken"; import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const"; import { TokStatus, Book } from "../common/types"; import { sendJsonResponse, parseSimplePostData, md5, uuid, } from "../common/utils"; import filetype from "file-type-cjs"; import fs from "node:fs"; import EPub from "epub"; import os from "node:os"; import path from "node:path"; import crypto from "node:crypto"; import { exec } from "node:child_process"; import http from "node:http"; async function getEpubCoverFromEpubFile_UNIX( epubFilepath: string ): Promise<[Buffer, string] | null> { let randomString = crypto.randomBytes(16).toString("hex"); let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`); fs.mkdirSync(tempDir); let unzipCMD = `unzip -q ${epubFilepath} -d ${tempDir}`; let unzipCMDExec = new Promise((resolve, reject) => { exec(unzipCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); try { await unzipCMDExec; } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`; let findCMDExec: Promise<string> = new Promise((resolve, reject) => { exec(findCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); let selectedFilePath: string; try { selectedFilePath = await findCMDExec; selectedFilePath = selectedFilePath.trim(); } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let ret: [Buffer, string] = [ Buffer.from(fs.readFileSync(selectedFilePath)), selectedFilePath, ]; fs.rmSync(tempDir, { recursive: true }); // we r good boys! return ret; } export default async function ( req: http.IncomingMessage, res: http.ServerResponse ) { const BOOK_DB = new BookModel(); const BUCKET = new Bucket(); await BOOK_DB.init();
await BUCKET.init();
try { if (req.method === "GET") { try { let userBooks = await BOOK_DB.getBooks(); userBooks = userBooks.map((e) => { delete e.path; return e; }); sendJsonResponse(res, userBooks, 200); } catch (error) { console.error(error); sendJsonResponse(res, ERROR.internalErr); } } else if (req.method === "POST") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let epubBuffer: Buffer; epubBuffer = await parseSimplePostData(req); let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6); let bufferMime = await filetype.fromBuffer(epubBuffer); if (bufferMime.mime != "application/epub+zip") { sendJsonResponse(res, ERROR.invalidMimeForResource, 415); return; } if (epubSizeInMB > MAX_EPUB_SIZE_MB) { sendJsonResponse(res, ERROR.fileTooLarge, 400); return; } let randomString = crypto.randomBytes(16).toString("hex"); const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`); fs.writeFileSync(tempEpubFilePath, epubBuffer); const epub: any = await new Promise((resolve, reject) => { const epub = new EPub(tempEpubFilePath); epub.on("end", () => resolve(epub)); epub.on("error", reject); epub.parse(); }); let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath); console.log(epubCoverBuffer); let epubSignature = md5(epubBuffer.toString("hex")); let foundBook = await BOOK_DB.getBook("", epubSignature); if (foundBook) { sendJsonResponse( res, { ...ERROR.resourceExists, data: { id: foundBook.id, }, }, 409 ); return; } let epubFilePermalink = await BUCKET.pushBufferWithName( epubBuffer, `${epubSignature}.epub` ); let epubCoverPermalink = null; if (epubCoverBuffer) { epubCoverPermalink = await BUCKET.pushBufferWithName( epubCoverBuffer[0], `${epubSignature}.${epubCoverBuffer[1].split(".").pop()}` ); } let epubID = uuid(); let epubEntry: Book = { id: epubID, userid: parsedAuthToken.id, title: epub.metadata?.title ?? epubID.split("-").pop(), author: epub.metadata?.creator ?? 
parsedAuthToken.email, path: epubFilePermalink, signature: epubSignature, cover: epubCoverPermalink, }; const pushed = await BOOK_DB.pushBook(epubEntry); if (!pushed) { sendJsonResponse(res, ERROR.internalErr, 500); return; } sendJsonResponse( res, { error: null, message: `successfully published a book of id ${epubEntry.id}`, data: { id: epubEntry.id, }, }, 201 ); } else if (req.method === "DELETE") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let body: Buffer; body = await parseSimplePostData(req); let data: any; try { data = JSON.parse(body.toString()); } catch { sendJsonResponse(res, ERROR.invalidJSONData, 400); return; } if (!data.bookid) { sendJsonResponse(res, ERROR.badRequest, 400); return; } let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id); if (!bookDeleted) { sendJsonResponse(res, { error: "unable-to-delete-book", message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`, status: 404 }, 404) return; } sendJsonResponse(res, { error: null, message: `successfully deleted book of id ${data.bookid}`, status: 204, data: { id: data.bookid, } }, 204) } } finally { await BOOK_DB.close(); } }
src/routes/Books.ts
Aadv1k-quillia-52c5b34
[ { "filename": "src/models/BookModel.ts", "retrieved_chunk": " database: DBConfig.DB_NAME,\n port: DBConfig.PORT,\n ssl: true\n })\n }\n async init(): Promise<void> {\n try {\n await this.client.connect();\n await this.client.query(`CREATE TABLE IF NOT EXISTS books (\n id VARCHAR(255) UNIQUE NOT NULL,", "score": 0.8121455907821655 }, { "filename": "src/models/UserModel.ts", "retrieved_chunk": " database: DBConfig.DB_NAME,\n port: DBConfig.PORT,\n ssl: true\n })\n }\n async init(): Promise<void> {\n try {\n await this.client.connect();\n await this.client.query(`\n CREATE TABLE IF NOT EXISTS users (", "score": 0.7962951064109802 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " return;\n }\n await ISSUE_DB.init();\n await BOOK_DB.init();\n await USER_DB.init();\n const parsedAuthToken: any = token.UNSAFE_parse(authToken);\n if (req.method === \"GET\") {\n let URLParams = req.url.split(\"/\").slice(3);\n let requestedBook = URLParams?.[0];\n if (requestedBook) {", "score": 0.7888708114624023 }, { "filename": "src/models/IssueModel.ts", "retrieved_chunk": " database: DBConfig.DB_NAME,\n port: DBConfig.PORT,\n ssl: true,\n });\n }\n async init(): Promise<void> {\n try {\n await this.client.connect();\n await this.client.query(`CREATE TABLE IF NOT EXISTS issues (\n id VARCHAR(255) UNIQUE NOT NULL,", "score": 0.7826101183891296 }, { "filename": "src/models/BookModel.ts", "retrieved_chunk": "import { Client } from \"pg\";\nimport { DB as DBConfig } from \"../common/const\";\nimport { Book } from \"../common/types\";\nexport default class BookModel {\n private readonly client: Client;\n constructor() {\n this.client = new Client({\n host: DBConfig.HOST,\n user: DBConfig.USER,\n password: DBConfig.PASSWORD,", "score": 0.7782484292984009 } ]
typescript
await BUCKET.init();
import { Cache } from 'cache-manager'; import pEvent from 'p-event'; import { EAuthTokenPlatformType, EResult, LoginSession } from 'steam-session'; import SteamTotp from 'steam-totp'; import { CACHE_MANAGER } from '@nestjs/cache-manager'; import { Inject, Injectable } from '@nestjs/common'; import { Account } from '../../interfaces/account.interface'; import { ProxiesService } from '../proxies/proxies.service'; @Injectable() export class SteamTokensService { private readonly connectionThrottlingTimeout = 31 * 1000; constructor( @Inject(CACHE_MANAGER) private throttledConnections: Cache, private readonly proxiesService: ProxiesService, ) {} public async createRefreshToken(account: Account, platform: 'web' | 'mobile' | 'desktop') { const loginSessionPlatform = this.inferLoginSessionPlatform(platform); const proxy = await this.proxiesService.getProxy(); const connectionId = this.inferConnectionId((proxy || '').toString()); await this.waitConnectionLimitReset(connectionId); this.throttleConnection(connectionId, this.connectionThrottlingTimeout); const loginSessionOptions = {}; if (proxy) loginSessionOptions[proxy.protocol.includes('socks') ? 'socksProxy' : 'httpProxy'] = proxy.toString(); const loginSession = new LoginSession(loginSessionPlatform, loginSessionOptions); loginSession.on('error', () => {}); // fallback errors handling try { const credentials = { accountName: account.username, password: account.password } as any; if (account.sharedSecret) credentials.steamGuardCode = SteamTotp.getAuthCode(account.sharedSecret); loginSession .startWithCredentials(credentials) .then((result) => result.actionRequired && loginSession.emit('error', new Error('Guard action required'))) .catch((error) => loginSession.emit('error', error)); await pEvent(loginSession, 'authenticated', { rejectionEvents: ['error', 'timeout'], timeout: 35000 }); const refreshToken = loginSession.refreshToken; if (!refreshToken) throw new Error('Refresh token is empty'); return refreshToken; } catch (error) { if (error.eresult === EResult.RateLimitExceeded) this.throttleConnection(connectionId, 31 * 60 * 1000); throw new Error('Failed to create refresh token', { cause: error }); } finally { if (loginSession) loginSession.cancelLoginAttempt(); } } public decodeRefreshToken(token: string) { try { const parts = token.split('.'); if (parts.length !== 3) throw new Error('Invalid token'); const headerBase64Url = parts[1]; const headerBase64 = headerBase64Url.replace(/-/g, '+').replace(/_/g, '/'); const headerJson = Buffer.from(headerBase64, 'base64').toString('utf-8'); return JSON.parse(headerJson); } catch (error) { throw new Error('An error occurred while decoding refresh token', { cause: error }); } } public validateRefreshToken(token: string) { try { const { iss, sub, exp, aud } = this.decodeRefreshToken(token); if (!iss || !sub || !exp || !aud) return false; if (iss !== 'steam') return false; if (exp < Math.floor(Date.now() / 1000)) return false; if (!aud.includes('renew')) return false; return true; } catch (error) { return false; } } public getRefreshTokenExpiration(token: string) { try { const { exp } = this.decodeRefreshToken(token); return exp * 1000; } catch (error) { return 0; } } private inferLoginSessionPlatform(platform: 'web' | 'mobile' | 'desktop'): EAuthTokenPlatformType { if (platform === 'web') return EAuthTokenPlatformType.WebBrowser; else if (platform === 'mobile') return EAuthTokenPlatformType.MobileApp; else if (platform === 'desktop') return EAuthTokenPlatformType.SteamClient; else throw new Error('Invalid 
platform'); } private inferConnectionId(id?: string) { return `${SteamTokensService.name}:${id || 'localhost'}`; } private throttleConnection(connectionId: string, timeoutMs: number) { connectionId = this.inferConnectionId(connectionId); this.throttledConnections.set(connectionId, true, timeoutMs); if
(this.inferConnectionId() !== connectionId) this.proxiesService.throttleProxy(connectionId, timeoutMs);
} private async waitConnectionLimitReset(connectionId: string) { connectionId = this.inferConnectionId(connectionId); const execute = () => { if (this.throttledConnections.get(connectionId)) return false; this.throttleConnection(connectionId, 1000); return true; }; if (execute()) return; return new Promise<void>((resolve) => { const interval = setInterval(() => { if (!execute()) return; clearInterval(interval); resolve(); }, 1000); }); } }
src/modules/steam-tokens/steam-tokens.service.ts
Sadzurami-steam-sessions-creator-97b7294
[ { "filename": "src/modules/proxies/proxies.service.ts", "retrieved_chunk": " return proxy;\n }\n public getProxiesCount() {\n return this.proxies.size;\n }\n public throttleProxy(proxy: Proxy | string, timeoutMs?: number) {\n const proxyId = this.getProxyId(proxy);\n this.throttledProxies.set(proxyId, true, timeoutMs);\n }\n private async fetchProxy() {", "score": 0.8386924266815186 }, { "filename": "src/modules/proxies/proxies.service.ts", "retrieved_chunk": " return proxy;\n }\n private findAvailableProxy(): Proxy | null {\n for (const proxy of this.proxies.values()) {\n const proxyId = this.getProxyId(proxy);\n if (this.throttledProxies.get(proxyId)) continue;\n return proxy;\n }\n return null;\n }", "score": 0.7901618480682373 }, { "filename": "src/modules/proxies/proxies.service.ts", "retrieved_chunk": " public setProxies(proxies: Proxy[]) {\n if (proxies.length === 0) return;\n for (const proxy of proxies) {\n this.proxies.set(proxy.toString(), proxy);\n }\n }\n public async getProxy(): Promise<Proxy | null> {\n if (this.proxies.size === 0) return null;\n const proxy = await this.proxiesUsageQueue.add(() => this.fetchProxy());\n this.throttleProxy(proxy);", "score": 0.7851283550262451 }, { "filename": "src/commands/create/create-sessions.service.ts", "retrieved_chunk": " });\n } catch (error) {\n throw new Error('Failed to create refresh token', { cause: error });\n }\n }\n private getSteamIdFromRefreshToken(token: string) {\n try {\n const { sub: steamId } = this.steamTokensService.decodeRefreshToken(token);\n if (!steamId) throw new Error('SteamId is missing from refresh token');\n return steamId;", "score": 0.7678359150886536 }, { "filename": "src/commands/create/create-sessions.service.ts", "retrieved_chunk": " } catch (error) {\n throw new Error('Failed to create session', { cause: error });\n }\n }\n private async createRefreshToken(account: Account, platform: 'web' | 'mobile' | 'desktop') {\n try {\n return await pRetry(() => this.steamTokensService.createRefreshToken(account, platform), {\n retries: 3,\n minTimeout: 31000,\n maxTimeout: 31000,", "score": 0.7643805742263794 } ]
typescript
(this.inferConnectionId() !== connectionId) this.proxiesService.throttleProxy(connectionId, timeoutMs);
import BookModel from "../models/BookModel"; import Bucket from "../models/Bucket"; import Token from "../lib/GenerateToken"; import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const"; import { TokStatus, Book } from "../common/types"; import { sendJsonResponse, parseSimplePostData, md5, uuid, } from "../common/utils"; import filetype from "file-type-cjs"; import fs from "node:fs"; import EPub from "epub"; import os from "node:os"; import path from "node:path"; import crypto from "node:crypto"; import { exec } from "node:child_process"; import http from "node:http"; async function getEpubCoverFromEpubFile_UNIX( epubFilepath: string ): Promise<[Buffer, string] | null> { let randomString = crypto.randomBytes(16).toString("hex"); let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`); fs.mkdirSync(tempDir); let unzipCMD = `unzip -q ${epubFilepath} -d ${tempDir}`; let unzipCMDExec = new Promise((resolve, reject) => { exec(unzipCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); try { await unzipCMDExec; } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`; let findCMDExec: Promise<string> = new Promise((resolve, reject) => { exec(findCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); let selectedFilePath: string; try { selectedFilePath = await findCMDExec; selectedFilePath = selectedFilePath.trim(); } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let ret: [Buffer, string] = [ Buffer.from(fs.readFileSync(selectedFilePath)), selectedFilePath, ]; fs.rmSync(tempDir, { recursive: true }); // we r good boys! return ret; } export default async function ( req: http.IncomingMessage, res: http.ServerResponse ) { const BOOK_DB = new BookModel(); const BUCKET = new Bucket(); await BOOK_DB.init(); await BUCKET.init(); try { if (req.method === "GET") { try { let userBooks = await BOOK_DB.getBooks(); userBooks = userBooks.map((e) => { delete e.path; return e; }); sendJsonResponse(res, userBooks, 200); } catch (error) { console.error(error); sendJsonResponse(res, ERROR.internalErr); } } else if (req.method === "POST") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let epubBuffer: Buffer; epubBuffer = await parseSimplePostData(req); let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6); let bufferMime = await filetype.fromBuffer(epubBuffer); if (bufferMime.mime != "application/epub+zip") { sendJsonResponse(res, ERROR.invalidMimeForResource, 415); return; } if (epubSizeInMB > MAX_EPUB_SIZE_MB) {
sendJsonResponse(res, ERROR.fileTooLarge, 400);
return; } let randomString = crypto.randomBytes(16).toString("hex"); const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`); fs.writeFileSync(tempEpubFilePath, epubBuffer); const epub: any = await new Promise((resolve, reject) => { const epub = new EPub(tempEpubFilePath); epub.on("end", () => resolve(epub)); epub.on("error", reject); epub.parse(); }); let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath); console.log(epubCoverBuffer); let epubSignature = md5(epubBuffer.toString("hex")); let foundBook = await BOOK_DB.getBook("", epubSignature); if (foundBook) { sendJsonResponse( res, { ...ERROR.resourceExists, data: { id: foundBook.id, }, }, 409 ); return; } let epubFilePermalink = await BUCKET.pushBufferWithName( epubBuffer, `${epubSignature}.epub` ); let epubCoverPermalink = null; if (epubCoverBuffer) { epubCoverPermalink = await BUCKET.pushBufferWithName( epubCoverBuffer[0], `${epubSignature}.${epubCoverBuffer[1].split(".").pop()}` ); } let epubID = uuid(); let epubEntry: Book = { id: epubID, userid: parsedAuthToken.id, title: epub.metadata?.title ?? epubID.split("-").pop(), author: epub.metadata?.creator ?? parsedAuthToken.email, path: epubFilePermalink, signature: epubSignature, cover: epubCoverPermalink, }; const pushed = await BOOK_DB.pushBook(epubEntry); if (!pushed) { sendJsonResponse(res, ERROR.internalErr, 500); return; } sendJsonResponse( res, { error: null, message: `successfully published a book of id ${epubEntry.id}`, data: { id: epubEntry.id, }, }, 201 ); } else if (req.method === "DELETE") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let body: Buffer; body = await parseSimplePostData(req); let data: any; try { data = JSON.parse(body.toString()); } catch { sendJsonResponse(res, ERROR.invalidJSONData, 400); return; } if (!data.bookid) { sendJsonResponse(res, ERROR.badRequest, 400); return; } let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id); if (!bookDeleted) { sendJsonResponse(res, { error: "unable-to-delete-book", message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`, status: 404 }, 404) return; } sendJsonResponse(res, { error: null, message: `successfully deleted book of id ${data.bookid}`, status: 204, data: { id: data.bookid, } }, 204) } } finally { await BOOK_DB.close(); } }
src/routes/Books.ts
Aadv1k-quillia-52c5b34
[ { "filename": "src/routes/Issue.ts", "retrieved_chunk": " let targetBook = await BOOK_DB.getBook(requestedBook);\n if (!targetBook) {\n sendJsonResponse(res, ERROR.resourceNotExists, 404);\n return;\n }\n let epubResourcePath = targetBook.path;\n const response: Array<Buffer> = await new Promise((resolve, reject) => {\n https.get(epubResourcePath, (res) => {\n let data: Array<Buffer> = [];\n res.on(\"data\", (d: Buffer) => data.push(d));", "score": 0.805643618106842 }, { "filename": "src/routes/Login.ts", "retrieved_chunk": " const DB = new UserModel();\n let data: any = await parseSimplePostData(req);\n data = data.toString();\n if (req.method !== \"POST\") {\n sendJsonResponse(res, ERROR.methodNotAllowed, 405);\n return;\n }\n let parsedData: User;\n try {\n parsedData = JSON.parse(data);", "score": 0.7778581380844116 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " res.on(\"end\", () => resolve(data));\n res.on(\"error\", (error) => reject(error));\n });\n });\n let epubBuffer = Buffer.concat(response);\n sendEpubResponse(res, epubBuffer);\n return;\n } else {\n let userIssues = await ISSUE_DB.getIssues(parsedAuthToken.id);\n if (!userIssues) {", "score": 0.7772011756896973 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " sendJsonResponse(res, ERROR.resourceNotExists, 404);\n } else {\n sendJsonResponse(res, userIssues, 200);\n }\n }\n } else if (req.method === \"POST\") {\n if (req.headers?.[\"content-type\"] != \"application/json\") {\n sendJsonResponse(res, ERROR.invalidMimeForResource, 415);\n return;\n }", "score": 0.7767367362976074 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " let issueData: Issue;\n try {\n let issuePostData = await parseSimplePostData(req);\n issueData = JSON.parse(issuePostData.toString());\n } catch (error) {\n console.error(error);\n sendJsonResponse(res, ERROR.badRequest, 400);\n return;\n }\n if (!issueData.lenderid || !issueData.bookid) {", "score": 0.7692471742630005 } ]
typescript
sendJsonResponse(res, ERROR.fileTooLarge, 400);
import fs from 'fs/promises'; import inquirer from 'inquirer'; import pQueue from 'p-queue'; import path from 'path'; import { setTimeout as delay } from 'timers/promises'; import { Injectable, Logger } from '@nestjs/common'; import { Secrets as ISecrets } from '../../interfaces/secrets.interface'; class Secrets implements ISecrets { public readonly username: string; public readonly sharedSecret: string; public readonly identitySecret: string; constructor(secrets: string) { let parsedSecrets: any; try { parsedSecrets = JSON.parse(secrets); } catch (error) {} if (typeof parsedSecrets !== 'object' || parsedSecrets === null) throw new Error('Secrets string is invalid'); const { shared_secret, identity_secret, account_name } = parsedSecrets; if (!shared_secret) throw new Error('Shared secret is missing'); this.sharedSecret = shared_secret; if (!identity_secret) throw new Error('Identity secret is missing'); this.identitySecret = identity_secret; if (!account_name) throw new Error('Account name is missing'); this.username = account_name; } } @Injectable() export class SecretsImportService { private readonly logger = new Logger(SecretsImportService.name); private readonly readFilesQueue = new pQueue({ concurrency: 100 }); public async loadSecrets(input: string[] | string) { if (!input) return []; if (!Array.isArray(input)) input = [input]; if (input.length === 0) return []; let secrets: Secrets[] = []; const errors: string[] = []; const readResults = await Promise.all(input.map((input) => this.readSecretsFromInput(input))); for (const result of readResults) { secrets.push(...result.values); errors.push(...result.errors); } secrets = this.removeDuplicates(secrets); if (errors.length > 0) { this.logger.warn(`The following secret sources are invalid:\n${errors.join('\n')}`); await delay(1000); const { confirm } = await inquirer.prompt({ type: 'confirm', name: 'confirm', message: secrets.length > 0 ? 
`Continue with ${secrets.length} valid secrets?` : 'Continue without any secrets?', default: false, }); if (!confirm) throw new Error('Aborted by user'); } return secrets; } private removeDuplicates(secrets: Secrets[]) { const map = new Map<string, Secrets>(); for (const secret of secrets) map.set(secret.username, secret); return [...map.values()]; } private async readSecretsFromInput(input: string) { const inputType = await this.inferInputType(input); if (inputType === 'file') return this.readSecretsFromFile(input); if (inputType === 'string') return { values: [], errors: [input] }; if (inputType === 'directory') return { values: [], errors: [input] }; } private async readSecretsFromFile(filePath: string) { const result: { values: Secrets[]; errors: string[] } = { values: [], errors: [] }; try { const fileExtension = path.extname(filePath); // mafile if (fileExtension.toLowerCase() === '.mafile') { const readResult = await this.readSecretsFromMaFile(filePath); result.values.push(...readResult.values); if (readResult.errors.length > 0) result.errors.push(filePath); return result; } // asf db if (fileExtension === '.db') { const readResult = await this.readSecretsFromAsfDbFile(filePath); result.values.push(...readResult.values); if (readResult.errors.length > 0) result.errors.push(filePath); return result; } throw new Error('Unsupported file format'); } catch (error) { result.errors.push(filePath); } return result; } private async readSecretsFromMaFile(filePath: string) { const result: { values: Secrets[]; errors: string[] } = { values: [], errors: [] }; try { let content = await this.readFilesQueue.add(() => fs.readFile(filePath, 'utf-8')); content = content.trim().replace(/},\s*}/g, '}}'); const secrets =
new Secrets(content);
result.values.push(secrets); } catch (error) { result.errors.push(filePath); } return result; } private async readSecretsFromAsfDbFile(filePath: string) { const result: { values: Secrets[]; errors: string[] } = { values: [], errors: [] }; try { let content = await this.readFilesQueue.add(() => fs.readFile(filePath, 'utf-8')); const parsedContent = JSON.parse(content)['_MobileAuthenticator']; parsedContent['account_name'] = path.basename(filePath, path.extname(filePath)); content = JSON.stringify(parsedContent); const secrets = new Secrets(content); result.values.push(secrets); } catch (error) { result.errors.push(filePath); } return result; } private async inferInputType(input: string) { if (typeof input !== 'string') throw new Error(`Invalid input type: '${typeof input}'`); try { const stats = await fs.stat(input); if (stats.isFile()) return 'file'; if (stats.isDirectory()) return 'directory'; } catch (error) { return 'string'; } } }
src/modules/secrets-import/secrets-import.service.ts
Sadzurami-steam-sessions-creator-97b7294
[ { "filename": "src/modules/proxies-import/proxies-import.service.ts", "retrieved_chunk": " } catch (error) {\n result.errors.push(str);\n }\n return result;\n }\n private async readProxyFromFile(path: string) {\n const result: { values: Proxy[]; errors: string[] } = { values: [], errors: [] };\n try {\n const file = await fs.readFile(path, 'utf8');\n const lines = file", "score": 0.9038546085357666 }, { "filename": "src/modules/sessions-import/sessions-import.service.ts", "retrieved_chunk": " private async readSessionsFromInput(input: string) {\n const inputType = await this.inferInputType(input);\n if (inputType === 'file') return this.readSessionFromFile(input);\n if (inputType === 'string') return { values: [], errors: [input] };\n if (inputType === 'directory') return { values: [], errors: [input] };\n }\n private async readSessionFromFile(filePath: string) {\n const result: { values: Session[]; errors: string[] } = { values: [], errors: [] };\n try {\n let content = await this.readFilesQueue.add(() => fs.readFile(filePath, 'utf-8'));", "score": 0.8754994869232178 }, { "filename": "src/modules/accounts-import/accounts-import.service.ts", "retrieved_chunk": " const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };\n try {\n const account = new Account(str);\n result.values.push(account);\n } catch (error) {\n result.errors.push(str);\n }\n return result;\n }\n private readAccountFromAsfJson(fileContent: string) {", "score": 0.8410925269126892 }, { "filename": "src/modules/proxies-import/proxies-import.service.ts", "retrieved_chunk": " const inputType = await this.inferInputType(input);\n if (inputType === 'file') return await this.readProxyFromFile(input);\n if (inputType === 'string') return this.readProxyFromString(input);\n if (inputType === 'directory') return { values: [], errors: [input] };\n }\n private readProxyFromString(str: string) {\n const result: { values: Proxy[]; errors: string[] } = { values: [], errors: [] };\n try {\n const proxy = new Proxy(str);\n result.values.push(proxy);", "score": 0.8403218984603882 }, { "filename": "src/modules/accounts-import/accounts-import.service.ts", "retrieved_chunk": " const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };\n try {\n let content = await this.readFilesQueue.add(() => fs.readFile(filePath, 'utf-8'));\n content = content.trim();\n if (content.length === 0) throw new Error('Empty file');\n // session file\n if (filePath.endsWith('.steamsession')) {\n const readResults = this.readAccountFromSessionFile(content);\n result.values.push(...readResults.values);\n if (readResults.errors.length > 0) result.errors.push(filePath);", "score": 0.8351747989654541 } ]
typescript
new Secrets(content);
import BookModel from "../models/BookModel"; import Bucket from "../models/Bucket"; import Token from "../lib/GenerateToken"; import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const"; import { TokStatus, Book } from "../common/types"; import { sendJsonResponse, parseSimplePostData, md5, uuid, } from "../common/utils"; import filetype from "file-type-cjs"; import fs from "node:fs"; import EPub from "epub"; import os from "node:os"; import path from "node:path"; import crypto from "node:crypto"; import { exec } from "node:child_process"; import http from "node:http"; async function getEpubCoverFromEpubFile_UNIX( epubFilepath: string ): Promise<[Buffer, string] | null> { let randomString = crypto.randomBytes(16).toString("hex"); let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`); fs.mkdirSync(tempDir); let unzipCMD = `unzip -q ${epubFilepath} -d ${tempDir}`; let unzipCMDExec = new Promise((resolve, reject) => { exec(unzipCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); try { await unzipCMDExec; } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`; let findCMDExec: Promise<string> = new Promise((resolve, reject) => { exec(findCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); let selectedFilePath: string; try { selectedFilePath = await findCMDExec; selectedFilePath = selectedFilePath.trim(); } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let ret: [Buffer, string] = [ Buffer.from(fs.readFileSync(selectedFilePath)), selectedFilePath, ]; fs.rmSync(tempDir, { recursive: true }); // we r good boys! 
return ret; } export default async function ( req: http.IncomingMessage, res: http.ServerResponse ) { const BOOK_DB = new BookModel(); const BUCKET = new Bucket(); await BOOK_DB.init(); await BUCKET.init(); try { if (req.method === "GET") { try { let userBooks = await BOOK_DB.getBooks(); userBooks = userBooks.map((e) => { delete e.path; return e; }); sendJsonResponse(res, userBooks, 200); } catch (error) { console.error(error); sendJsonResponse(res, ERROR.internalErr); } } else if (req.method === "POST") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let epubBuffer: Buffer; epubBuffer = await parseSimplePostData(req); let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6); let bufferMime = await filetype.fromBuffer(epubBuffer); if (bufferMime.mime != "application/epub+zip") { sendJsonResponse(res, ERROR.invalidMimeForResource, 415); return; } if (epubSizeInMB > MAX_EPUB_SIZE_MB) { sendJsonResponse(res, ERROR.fileTooLarge, 400); return; } let randomString = crypto.randomBytes(16).toString("hex"); const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`); fs.writeFileSync(tempEpubFilePath, epubBuffer); const epub: any = await new Promise((resolve, reject) => { const epub = new EPub(tempEpubFilePath); epub.on("end", () => resolve(epub)); epub.on("error", reject); epub.parse(); }); let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath); console.log(epubCoverBuffer); let epubSignature = md5(epubBuffer.toString("hex")); let foundBook = await BOOK_DB.getBook("", epubSignature); if (foundBook) { sendJsonResponse( res, { ...ERROR.resourceExists, data: { id: foundBook.id, }, }, 409 ); return; } let epubFilePermalink = await BUCKET.pushBufferWithName( epubBuffer, `${epubSignature}.epub` ); let epubCoverPermalink = null; if (epubCoverBuffer) { epubCoverPermalink = await BUCKET.pushBufferWithName( epubCoverBuffer[0], `${epubSignature}.${epubCoverBuffer[1].split(".").pop()}` ); } let epubID = uuid(); let epubEntry: Book = { id: epubID, userid: parsedAuthToken.id, title: epub.metadata?.title ?? epubID.split("-").pop(), author: epub.metadata?.creator ?? 
parsedAuthToken.email, path: epubFilePermalink, signature: epubSignature, cover: epubCoverPermalink, }; const pushed = await BOOK_DB.pushBook(epubEntry); if (!pushed) { sendJsonResponse(res, ERROR.internalErr, 500); return; } sendJsonResponse( res, { error: null, message: `successfully published a book of id ${epubEntry.id}`, data: { id: epubEntry.id, }, }, 201 ); } else if (req.method === "DELETE") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let body: Buffer; body = await parseSimplePostData(req); let data: any; try { data = JSON.parse(body.toString()); } catch { sendJsonResponse(res, ERROR.invalidJSONData, 400); return; } if (!data.bookid) { sendJsonResponse(res, ERROR.badRequest, 400); return; } let bookDeleted =
await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id);
if (!bookDeleted) { sendJsonResponse(res, { error: "unable-to-delete-book", message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`, status: 404 }, 404) return; } sendJsonResponse(res, { error: null, message: `successfully deleted book of id ${data.bookid}`, status: 204, data: { id: data.bookid, } }, 204) } } finally { await BOOK_DB.close(); } }
src/routes/Books.ts
Aadv1k-quillia-52c5b34
[ { "filename": "src/routes/Login.ts", "retrieved_chunk": " } catch(error) {\n sendJsonResponse(res, ERROR.invalidJSONData, 400)\n return;\n }\n await DB.init();\n const foundUser: User = await DB.getUser(parsedData.email);\n await DB.close();\n if (!foundUser) {\n sendJsonResponse(res, ERROR.userNotFound, 404);\n return;", "score": 0.8812325596809387 }, { "filename": "src/routes/Signup.ts", "retrieved_chunk": " sendJsonResponse(res, ERROR.badRequest, 400);\n return;\n }\n await DB.init();\n let foundUser = await DB.getUser(parsedData.email);\n if (foundUser) {\n sendJsonResponse(res, ERROR.userAlreadyExists, 409)\n return;\n }\n let user: User = {", "score": 0.8593226075172424 }, { "filename": "src/routes/Signup.ts", "retrieved_chunk": " parsedData = JSON.parse(data === \"\" ? '{}' : data);\n } catch {\n sendJsonResponse(res, ERROR.invalidJSONData, 400)\n return;\n }\n if (!parsedData.email || !parsedData.password) {\n sendJsonResponse(res, ERROR.badRequest, 400);\n return;\n }\n if (!isEmailValid(parsedData.email)) {", "score": 0.8490901589393616 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " sendJsonResponse(res, ERROR.badRequest, 400);\n return;\n }\n let foundLender = await USER_DB.getUserByID(issueData.lenderid);\n let foundBook = await BOOK_DB.getBook(issueData.bookid);\n if (!foundLender || !foundBook) {\n sendJsonResponse(res, ERROR.resourceNotExists, 404);\n return;\n }\n let foundIssue = await ISSUE_DB.getIssue(", "score": 0.8489510416984558 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " id: issueid,\n borrowerid: parsedAuthToken.id,\n lenderid: foundLender.id,\n bookid: foundBook.id,\n };\n const pushed = await ISSUE_DB.pushIssue(issueEntry);\n if (!pushed) {\n sendJsonResponse(res, ERROR.internalErr, 500);\n return;\n }", "score": 0.8485585451126099 } ]
typescript
await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id);
import BookModel from "../models/BookModel"; import Bucket from "../models/Bucket"; import Token from "../lib/GenerateToken"; import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const"; import { TokStatus, Book } from "../common/types"; import { sendJsonResponse, parseSimplePostData, md5, uuid, } from "../common/utils"; import filetype from "file-type-cjs"; import fs from "node:fs"; import EPub from "epub"; import os from "node:os"; import path from "node:path"; import crypto from "node:crypto"; import { exec } from "node:child_process"; import http from "node:http"; async function getEpubCoverFromEpubFile_UNIX( epubFilepath: string ): Promise<[Buffer, string] | null> { let randomString = crypto.randomBytes(16).toString("hex"); let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`); fs.mkdirSync(tempDir); let unzipCMD = `unzip -q ${epubFilepath} -d ${tempDir}`; let unzipCMDExec = new Promise((resolve, reject) => { exec(unzipCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); try { await unzipCMDExec; } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`; let findCMDExec: Promise<string> = new Promise((resolve, reject) => { exec(findCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); let selectedFilePath: string; try { selectedFilePath = await findCMDExec; selectedFilePath = selectedFilePath.trim(); } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let ret: [Buffer, string] = [ Buffer.from(fs.readFileSync(selectedFilePath)), selectedFilePath, ]; fs.rmSync(tempDir, { recursive: true }); // we r good boys! 
return ret; } export default async function ( req: http.IncomingMessage, res: http.ServerResponse ) { const BOOK_DB = new BookModel(); const BUCKET = new Bucket(); await BOOK_DB.init(); await BUCKET.init(); try { if (req.method === "GET") { try { let userBooks = await BOOK_DB.getBooks(); userBooks = userBooks.map((e) => { delete e.path; return e; }); sendJsonResponse(res, userBooks, 200); } catch (error) { console.error(error); sendJsonResponse(res, ERROR.internalErr); } } else if (req.method === "POST") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let epubBuffer: Buffer; epubBuffer = await parseSimplePostData(req); let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6); let bufferMime = await filetype.fromBuffer(epubBuffer); if (bufferMime.mime != "application/epub+zip") { sendJsonResponse(res, ERROR.invalidMimeForResource, 415); return; } if (epubSizeInMB > MAX_EPUB_SIZE_MB) { sendJsonResponse(res, ERROR.fileTooLarge, 400); return; } let randomString = crypto.randomBytes(16).toString("hex"); const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`); fs.writeFileSync(tempEpubFilePath, epubBuffer); const epub: any = await new Promise((resolve, reject) => { const epub = new EPub(tempEpubFilePath); epub.on("end", () => resolve(epub)); epub.on("error", reject); epub.parse(); }); let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath); console.log(epubCoverBuffer); let epubSignature = md5(epubBuffer.toString("hex")); let foundBook = await BOOK_DB.getBook("", epubSignature); if (foundBook) { sendJsonResponse( res, { ...ERROR.resourceExists, data: { id: foundBook.id, }, }, 409 ); return; } let epubFilePermalink = await BUCKET.pushBufferWithName( epubBuffer, `${epubSignature}.epub` ); let epubCoverPermalink = null; if (epubCoverBuffer) { epubCoverPermalink = await BUCKET.pushBufferWithName( epubCoverBuffer[0], `${epubSignature}.${epubCoverBuffer[1].split(".").pop()}` ); } let epubID = uuid(); let
epubEntry: Book = {
id: epubID, userid: parsedAuthToken.id, title: epub.metadata?.title ?? epubID.split("-").pop(), author: epub.metadata?.creator ?? parsedAuthToken.email, path: epubFilePermalink, signature: epubSignature, cover: epubCoverPermalink, }; const pushed = await BOOK_DB.pushBook(epubEntry); if (!pushed) { sendJsonResponse(res, ERROR.internalErr, 500); return; } sendJsonResponse( res, { error: null, message: `successfully published a book of id ${epubEntry.id}`, data: { id: epubEntry.id, }, }, 201 ); } else if (req.method === "DELETE") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let body: Buffer; body = await parseSimplePostData(req); let data: any; try { data = JSON.parse(body.toString()); } catch { sendJsonResponse(res, ERROR.invalidJSONData, 400); return; } if (!data.bookid) { sendJsonResponse(res, ERROR.badRequest, 400); return; } let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id); if (!bookDeleted) { sendJsonResponse(res, { error: "unable-to-delete-book", message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`, status: 404 }, 404) return; } sendJsonResponse(res, { error: null, message: `successfully deleted book of id ${data.bookid}`, status: 204, data: { id: data.bookid, } }, 204) } } finally { await BOOK_DB.close(); } }
src/routes/Books.ts
Aadv1k-quillia-52c5b34
[ { "filename": "src/routes/Issue.ts", "retrieved_chunk": " let targetBook = await BOOK_DB.getBook(requestedBook);\n if (!targetBook) {\n sendJsonResponse(res, ERROR.resourceNotExists, 404);\n return;\n }\n let epubResourcePath = targetBook.path;\n const response: Array<Buffer> = await new Promise((resolve, reject) => {\n https.get(epubResourcePath, (res) => {\n let data: Array<Buffer> = [];\n res.on(\"data\", (d: Buffer) => data.push(d));", "score": 0.7450343370437622 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " res.on(\"end\", () => resolve(data));\n res.on(\"error\", (error) => reject(error));\n });\n });\n let epubBuffer = Buffer.concat(response);\n sendEpubResponse(res, epubBuffer);\n return;\n } else {\n let userIssues = await ISSUE_DB.getIssues(parsedAuthToken.id);\n if (!userIssues) {", "score": 0.7158390879631042 }, { "filename": "src/models/Bucket.ts", "retrieved_chunk": " let p = path.join(this.bucketPath, name);\n console.log(p);\n fs.writeFileSync(p, buffer);\n return p;\n }\n let response = new Promise((resolve, reject) => {\n const writeStream = this.bucket.uploader.upload_stream({\n public_id: name,\n resource_type: \"raw\",\n format: name.split('.').pop() // ideally \"unsafe\" files should not reach this point", "score": 0.7097799777984619 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " return;\n }\n await ISSUE_DB.init();\n await BOOK_DB.init();\n await USER_DB.init();\n const parsedAuthToken: any = token.UNSAFE_parse(authToken);\n if (req.method === \"GET\") {\n let URLParams = req.url.split(\"/\").slice(3);\n let requestedBook = URLParams?.[0];\n if (requestedBook) {", "score": 0.6989666223526001 }, { "filename": "src/models/BookModel.ts", "retrieved_chunk": " [book.id, book.userid, book.author, book.title, book.path, book?.cover ?? \"\", book.signature]\n )\n return book;\n } catch (error) {\n console.error(error); \n return null;\n }\n }\n async deleteBook(bookid: string, userid?: string) {\n try {", "score": 0.6742444038391113 } ]
typescript
epubEntry: Book = {
import fs from 'fs/promises'; import inquirer from 'inquirer'; import pQueue from 'p-queue'; import { setTimeout as delay } from 'timers/promises'; import { Injectable, Logger } from '@nestjs/common'; import { Session } from '../../interfaces/session.interface'; @Injectable() export class SessionsImportService { private readonly logger = new Logger(SessionsImportService.name); private readonly readFilesQueue = new pQueue({ concurrency: 100 }); public async loadSessions(input: string[] | string) { if (!input) return []; if (!Array.isArray(input)) input = [input]; if (input.length === 0) return []; let sessions: Session[] = []; const errors: string[] = []; const readResults = await Promise.all(input.map((input) => this.readSessionsFromInput(input))); for (const result of readResults) { sessions.push(...result.values); errors.push(...result.errors); } sessions = this.removeDuplicates(sessions); if (errors.length > 0 && sessions.length > 0) { this.logger.warn(`The following session sources are invalid:\n${errors.join('\n')}`); await delay(1000); const { confirm } = await inquirer.prompt({ type: 'confirm', name: 'confirm', message: 'Continue with the valid sessions?', default: false, }); if (!confirm) throw new Error('Aborted by user'); } return sessions; } private removeDuplicates(sessions: Session[]) { const map = new Map<string, Session>();
for (const session of sessions) map.set(session.username, session);
return [...map.values()]; } private async readSessionsFromInput(input: string) { const inputType = await this.inferInputType(input); if (inputType === 'file') return this.readSessionFromFile(input); if (inputType === 'string') return { values: [], errors: [input] }; if (inputType === 'directory') return { values: [], errors: [input] }; } private async readSessionFromFile(filePath: string) { const result: { values: Session[]; errors: string[] } = { values: [], errors: [] }; try { let content = await this.readFilesQueue.add(() => fs.readFile(filePath, 'utf-8')); content = JSON.parse(content); if (content == null || typeof content !== 'object' || Array.isArray(content)) { throw new Error('Invalid session file'); } const session = Object.fromEntries( Object.entries(content).map(([key, value]) => [key[0].toLowerCase() + key.slice(1), value]), ) as unknown as Session; result.values.push(session); } catch (error) { result.errors.push(filePath); } return result; } private async inferInputType(input: string) { if (typeof input !== 'string') throw new Error(`Invalid input type: '${typeof input}'`); try { const stats = await fs.stat(input); if (stats.isFile()) return 'file'; if (stats.isDirectory()) return 'directory'; } catch (error) { return 'string'; } } }
src/modules/sessions-import/sessions-import.service.ts
Sadzurami-steam-sessions-creator-97b7294
[ { "filename": "src/modules/secrets-import/secrets-import.service.ts", "retrieved_chunk": " default: false,\n });\n if (!confirm) throw new Error('Aborted by user');\n }\n return secrets;\n }\n private removeDuplicates(secrets: Secrets[]) {\n const map = new Map<string, Secrets>();\n for (const secret of secrets) map.set(secret.username, secret);\n return [...map.values()];", "score": 0.8889971375465393 }, { "filename": "src/modules/proxies-import/proxies-import.service.ts", "retrieved_chunk": " if (!confirm) throw new Error('Aborted by user');\n }\n return proxies;\n }\n private removeDuplicates(proxies: Proxy[]) {\n const map = new Map<string, Proxy>();\n for (const proxy of proxies) map.set(proxy.toString(), proxy);\n return [...map.values()];\n }\n private async readProxyFromInput(input: string) {", "score": 0.8801262378692627 }, { "filename": "src/commands/validate/validate-sessions.service.ts", "retrieved_chunk": " const valid: Session[] = [];\n const invalid: Session[] = [];\n for (const session of sessions) {\n const { valid: isValid, errors, expires } = await this.validateSession(session);\n if (isValid) {\n valid.push(session);\n this.logger.log(\n `Valid: ${session.username}, days: ${Math.floor((expires - Date.now()) / (24 * 60 * 60 * 1000))}`,\n );\n } else {", "score": 0.8548126220703125 }, { "filename": "src/modules/accounts-import/accounts-import.service.ts", "retrieved_chunk": " if (errors.length > 0 && accounts.length > 0) {\n this.logger.warn(`The following account sources are invalid:\\n${errors.join('\\n')}`);\n await delay(1000);\n const { confirm } = await inquirer.prompt({\n type: 'confirm',\n name: 'confirm',\n message: 'Continue with the valid accounts?',\n default: false,\n });\n if (!confirm) throw new Error('Aborted by user');", "score": 0.8420775532722473 }, { "filename": "src/commands/create/create-sessions.service.ts", "retrieved_chunk": " constructor(private readonly steamTokensService: SteamTokensService, private readonly configService: ConfigService) {}\n public onModuleInit() {\n this.schemaVersion = this.configService.getOrThrow<number>('session.schemaVersion');\n }\n public async createSession(account: Account) {\n try {\n // we need to wait at least 30 seconds between each refresh token creation\n // because steam has a limit of logins for one account once per 30 seconds\n // probably it's fair only for accounts with 2FA enabled\n const delayMs = 1000 * 31;", "score": 0.83067786693573 } ]
typescript
for (const session of sessions) map.set(session.username, session);
import BookModel from "../models/BookModel"; import Bucket from "../models/Bucket"; import Token from "../lib/GenerateToken"; import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const"; import { TokStatus, Book } from "../common/types"; import { sendJsonResponse, parseSimplePostData, md5, uuid, } from "../common/utils"; import filetype from "file-type-cjs"; import fs from "node:fs"; import EPub from "epub"; import os from "node:os"; import path from "node:path"; import crypto from "node:crypto"; import { exec } from "node:child_process"; import http from "node:http"; async function getEpubCoverFromEpubFile_UNIX( epubFilepath: string ): Promise<[Buffer, string] | null> { let randomString = crypto.randomBytes(16).toString("hex"); let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`); fs.mkdirSync(tempDir); let unzipCMD = `unzip -q ${epubFilepath} -d ${tempDir}`; let unzipCMDExec = new Promise((resolve, reject) => { exec(unzipCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); try { await unzipCMDExec; } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`; let findCMDExec: Promise<string> = new Promise((resolve, reject) => { exec(findCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); let selectedFilePath: string; try { selectedFilePath = await findCMDExec; selectedFilePath = selectedFilePath.trim(); } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let ret: [Buffer, string] = [ Buffer.from(fs.readFileSync(selectedFilePath)), selectedFilePath, ]; fs.rmSync(tempDir, { recursive: true }); // we r good boys! 
return ret; } export default async function ( req: http.IncomingMessage, res: http.ServerResponse ) { const BOOK_DB = new BookModel(); const BUCKET = new Bucket(); await BOOK_DB.init(); await BUCKET.init(); try { if (req.method === "GET") { try { let userBooks = await BOOK_DB.getBooks(); userBooks = userBooks.map((e) => { delete e.path; return e; }); sendJsonResponse(res, userBooks, 200); } catch (error) { console.error(error); sendJsonResponse(res, ERROR.internalErr); } } else if (req.method === "POST") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let epubBuffer: Buffer; epubBuffer = await parseSimplePostData(req); let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6); let bufferMime = await filetype.fromBuffer(epubBuffer); if (bufferMime.mime != "application/epub+zip") { sendJsonResponse(res, ERROR.invalidMimeForResource, 415); return; } if (epubSizeInMB > MAX_EPUB_SIZE_MB) { sendJsonResponse(res, ERROR.fileTooLarge, 400); return; } let randomString = crypto.randomBytes(16).toString("hex"); const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`); fs.writeFileSync(tempEpubFilePath, epubBuffer); const epub: any = await new Promise((resolve, reject) => { const epub = new EPub(tempEpubFilePath); epub.on("end", () => resolve(epub)); epub.on("error", reject); epub.parse(); }); let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath); console.log(epubCoverBuffer); let epubSignature = md5(epubBuffer.toString("hex")); let foundBook = await BOOK_DB.getBook("", epubSignature); if (foundBook) { sendJsonResponse( res, { ...ERROR.resourceExists, data: { id: foundBook.id, }, }, 409 ); return; } let epubFilePermalink = await BUCKET.pushBufferWithName( epubBuffer, `${epubSignature}.epub` ); let epubCoverPermalink = null; if (epubCoverBuffer) { epubCoverPermalink = await BUCKET.pushBufferWithName( epubCoverBuffer[0], `${epubSignature}.${epubCoverBuffer[1].split(".").pop()}` ); } let epubID = uuid(); let epubEntry: Book = { id: epubID, userid: parsedAuthToken.id, title: epub.metadata?.title ?? epubID.split("-").pop(), author: epub.metadata?.creator ?? 
parsedAuthToken.email, path: epubFilePermalink, signature: epubSignature, cover: epubCoverPermalink, }; const pushed = await BOOK_DB.pushBook(epubEntry); if (!pushed) { sendJsonResponse(res, ERROR.internalErr, 500); return; } sendJsonResponse( res, { error: null, message: `successfully published a book of id ${epubEntry.id}`, data: { id: epubEntry.id, }, }, 201 ); } else if (req.method === "DELETE") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let body: Buffer; body = await parseSimplePostData(req); let data: any; try { data = JSON.parse(body.toString()); } catch {
sendJsonResponse(res, ERROR.invalidJSONData, 400);
return; } if (!data.bookid) { sendJsonResponse(res, ERROR.badRequest, 400); return; } let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id); if (!bookDeleted) { sendJsonResponse(res, { error: "unable-to-delete-book", message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`, status: 404 }, 404); return; } sendJsonResponse(res, { error: null, message: `successfully deleted book of id ${data.bookid}`, status: 200, data: { id: data.bookid, } }, 200); } else { sendJsonResponse(res, ERROR.methodNotAllowed, 405); } } finally { await BOOK_DB.close(); } }
src/routes/Books.ts
Aadv1k-quillia-52c5b34
[ { "filename": "src/routes/Login.ts", "retrieved_chunk": " const DB = new UserModel();\n let data: any = await parseSimplePostData(req);\n data = data.toString();\n if (req.method !== \"POST\") {\n sendJsonResponse(res, ERROR.methodNotAllowed, 405);\n return;\n }\n let parsedData: User;\n try {\n parsedData = JSON.parse(data);", "score": 0.914134681224823 }, { "filename": "src/routes/Signup.ts", "retrieved_chunk": ") {\n const DB = new UserModel();\n if (req.method !== \"POST\") {\n sendJsonResponse(res, ERROR.methodNotAllowed, 405);\n return;\n }\n let data: any = await parseSimplePostData(req);\n data = data.toString();\n let parsedData: User;\n try {", "score": 0.9105095863342285 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " sendJsonResponse(res, ERROR.unauthorized, 401);\n return;\n }\n const token = new Token();\n const tokenStatus: TokStatus = token.verify(authToken);\n if (\n tokenStatus === TokStatus.INVALID ||\n tokenStatus === TokStatus.INVALID_SIG\n ) {\n sendJsonResponse(res, ERROR.unauthorized, 401);", "score": 0.8531969785690308 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " let issueData: Issue;\n try {\n let issuePostData = await parseSimplePostData(req);\n issueData = JSON.parse(issuePostData.toString());\n } catch (error) {\n console.error(error);\n sendJsonResponse(res, ERROR.badRequest, 400);\n return;\n }\n if (!issueData.lenderid || !issueData.bookid) {", "score": 0.8416948914527893 }, { "filename": "src/routes/Signup.ts", "retrieved_chunk": " parsedData = JSON.parse(data === \"\" ? '{}' : data);\n } catch {\n sendJsonResponse(res, ERROR.invalidJSONData, 400)\n return;\n }\n if (!parsedData.email || !parsedData.password) {\n sendJsonResponse(res, ERROR.badRequest, 400);\n return;\n }\n if (!isEmailValid(parsedData.email)) {", "score": 0.8405274152755737 } ]
typescript
sendJsonResponse(res, ERROR.invalidJSONData, 400);
import IssueModel from "../models/IssueModel"; import BookModel from "../models/BookModel"; import UserModel from "../models/UserModel"; import Token from "../lib/GenerateToken"; import { ERROR } from "../common/const"; import { TokStatus, Issue } from "../common/types"; import { sendJsonResponse, sendEpubResponse, parseSimplePostData, uuid, getBufferFromRawURL, } from "../common/utils"; import http from "node:http"; import https from "node:https"; export default async function ( req: http.IncomingMessage, res: http.ServerResponse ) { const ISSUE_DB = new IssueModel(); const BOOK_DB = new BookModel(); const USER_DB = new UserModel(); const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop()?.trim(); try { if (req.method === "OPTIONS") { sendJsonResponse(res, {}, 200); return; } if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } await ISSUE_DB.init(); await BOOK_DB.init(); await USER_DB.init(); const parsedAuthToken: any = token.UNSAFE_parse(authToken); if (req.method === "GET") { let URLParams = req.url.split("/").slice(3); let requestedBook = URLParams?.[0]; if (requestedBook) { let
targetBook = await BOOK_DB.getBook(requestedBook);
if (!targetBook) { sendJsonResponse(res, ERROR.resourceNotExists, 404); return; } let epubResourcePath = targetBook.path; const response: Array<Buffer> = await new Promise((resolve, reject) => { https.get(epubResourcePath, (res) => { let data: Array<Buffer> = []; res.on("data", (d: Buffer) => data.push(d)); res.on("end", () => resolve(data)); res.on("error", (error) => reject(error)); }); }); let epubBuffer = Buffer.concat(response); sendEpubResponse(res, epubBuffer); return; } else { let userIssues = await ISSUE_DB.getIssues(parsedAuthToken.id); if (!userIssues) { sendJsonResponse(res, ERROR.resourceNotExists, 404); } else { sendJsonResponse(res, userIssues, 200); } } } else if (req.method === "POST") { if (req.headers?.["content-type"] != "application/json") { sendJsonResponse(res, ERROR.invalidMimeForResource, 415); return; } let issueData: Issue; try { let issuePostData = await parseSimplePostData(req); issueData = JSON.parse(issuePostData.toString()); } catch (error) { console.error(error); sendJsonResponse(res, ERROR.badRequest, 400); return; } if (!issueData.lenderid || !issueData.bookid) { sendJsonResponse(res, ERROR.badRequest, 400); return; } let foundLender = await USER_DB.getUserByID(issueData.lenderid); let foundBook = await BOOK_DB.getBook(issueData.bookid); if (!foundLender || !foundBook) { sendJsonResponse(res, ERROR.resourceNotExists, 404); return; } let foundIssue = await ISSUE_DB.getIssue( foundLender.id, foundBook.id, parsedAuthToken.id ); if (foundIssue) { sendJsonResponse( res, { ...ERROR.resourceExists, data: { id: foundIssue.id, bookid: foundIssue.bookid, }, }, 409 ); return; } let issueid = uuid(); let issueEntry: Issue = { id: issueid, borrowerid: parsedAuthToken.id, lenderid: foundLender.id, bookid: foundBook.id, }; const pushed = await ISSUE_DB.pushIssue(issueEntry); if (!pushed) { sendJsonResponse(res, ERROR.internalErr, 500); return; } sendJsonResponse( res, { error: null, message: `successfully created a new issue of id ${issueEntry.id}`, data: { id: pushed.id, borrower: pushed.borrowerid, lender: pushed.lenderid, book: foundBook.title, }, }, 201 ); } } finally { await ISSUE_DB.close(); await BOOK_DB.close(); await USER_DB.close(); } }
src/routes/Issue.ts
Aadv1k-quillia-52c5b34
[ { "filename": "src/routes/Books.ts", "retrieved_chunk": " try {\n if (req.method === \"GET\") {\n try {\n let userBooks = await BOOK_DB.getBooks();\n userBooks = userBooks.map((e) => {\n delete e.path;\n return e;\n });\n sendJsonResponse(res, userBooks, 200);\n } catch (error) {", "score": 0.7909144163131714 }, { "filename": "src/routes/Books.ts", "retrieved_chunk": " return ret;\n}\nexport default async function (\n req: http.IncomingMessage,\n res: http.ServerResponse\n) {\n const BOOK_DB = new BookModel();\n const BUCKET = new Bucket();\n await BOOK_DB.init();\n await BUCKET.init();", "score": 0.7827035784721375 }, { "filename": "src/routes/Signup.ts", "retrieved_chunk": ") {\n const DB = new UserModel();\n if (req.method !== \"POST\") {\n sendJsonResponse(res, ERROR.methodNotAllowed, 405);\n return;\n }\n let data: any = await parseSimplePostData(req);\n data = data.toString();\n let parsedData: User;\n try {", "score": 0.7569898366928101 }, { "filename": "src/routes/Books.ts", "retrieved_chunk": " id: epubEntry.id,\n },\n },\n 201\n );\n } else if (req.method === \"DELETE\") {\n const authorization = req.headers?.authorization;\n const authToken = authorization?.split(\" \")?.pop();\n if (!authorization || !authToken) {\n sendJsonResponse(res, ERROR.unauthorized, 401);", "score": 0.7513793110847473 }, { "filename": "src/routes/Books.ts", "retrieved_chunk": " }\n const parsedAuthToken: any = token.UNSAFE_parse(authToken);\n let body: Buffer;\n body = await parseSimplePostData(req);\n let data: any;\n try {\n data = JSON.parse(body.toString());\n } catch {\n sendJsonResponse(res, ERROR.invalidJSONData, 400);\n return;", "score": 0.750464677810669 } ]
typescript
targetBook = await BOOK_DB.getBook(requestedBook);
import BookModel from "../models/BookModel"; import Bucket from "../models/Bucket"; import Token from "../lib/GenerateToken"; import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const"; import { TokStatus, Book } from "../common/types"; import { sendJsonResponse, parseSimplePostData, md5, uuid, } from "../common/utils"; import filetype from "file-type-cjs"; import fs from "node:fs"; import EPub from "epub"; import os from "node:os"; import path from "node:path"; import crypto from "node:crypto"; import { exec } from "node:child_process"; import http from "node:http"; async function getEpubCoverFromEpubFile_UNIX( epubFilepath: string ): Promise<[Buffer, string] | null> { let randomString = crypto.randomBytes(16).toString("hex"); let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`); fs.mkdirSync(tempDir); let unzipCMD = `unzip -q ${epubFilepath} -d ${tempDir}`; let unzipCMDExec = new Promise((resolve, reject) => { exec(unzipCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); try { await unzipCMDExec; } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`; let findCMDExec: Promise<string> = new Promise((resolve, reject) => { exec(findCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); let selectedFilePath: string; try { selectedFilePath = await findCMDExec; selectedFilePath = selectedFilePath.trim(); } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let ret: [Buffer, string] = [ Buffer.from(fs.readFileSync(selectedFilePath)), selectedFilePath, ]; fs.rmSync(tempDir, { recursive: true }); // we r good boys! 
return ret; } export default async function ( req: http.IncomingMessage, res: http.ServerResponse ) { const BOOK_DB = new BookModel(); const BUCKET = new Bucket(); await BOOK_DB.init(); await BUCKET.init(); try { if (req.method === "GET") { try { let userBooks = await BOOK_DB.getBooks(); userBooks = userBooks.map((e) => { delete e.path; return e; }); sendJsonResponse(res, userBooks, 200); } catch (error) { console.error(error); sendJsonResponse(res, ERROR.internalErr); } } else if (req.method === "POST") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let epubBuffer: Buffer; epubBuffer = await parseSimplePostData(req); let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6); let bufferMime = await filetype.fromBuffer(epubBuffer); if (bufferMime.mime != "application/epub+zip") { sendJsonResponse(res, ERROR.invalidMimeForResource, 415); return; } if (epubSizeInMB > MAX_EPUB_SIZE_MB) { sendJsonResponse(res, ERROR.fileTooLarge, 400); return; } let randomString = crypto.randomBytes(16).toString("hex"); const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`); fs.writeFileSync(tempEpubFilePath, epubBuffer); const epub: any = await new Promise((resolve, reject) => { const epub = new EPub(tempEpubFilePath); epub.on("end", () => resolve(epub)); epub.on("error", reject); epub.parse(); }); let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath); console.log(epubCoverBuffer); let epubSignature = md5(epubBuffer.toString("hex")); let foundBook = await BOOK_DB.getBook("", epubSignature); if (foundBook) { sendJsonResponse( res, { ...ERROR.resourceExists, data: { id: foundBook.id, }, }, 409 ); return; }
let epubFilePermalink = await BUCKET.pushBufferWithName( epubBuffer, `${epubSignature}.epub` );
let epubCoverPermalink = null; if (epubCoverBuffer) { epubCoverPermalink = await BUCKET.pushBufferWithName( epubCoverBuffer[0], `${epubSignature}.${epubCoverBuffer[1].split(".").pop()}` ); } let epubID = uuid(); let epubEntry: Book = { id: epubID, userid: parsedAuthToken.id, title: epub.metadata?.title ?? epubID.split("-").pop(), author: epub.metadata?.creator ?? parsedAuthToken.email, path: epubFilePermalink, signature: epubSignature, cover: epubCoverPermalink, }; const pushed = await BOOK_DB.pushBook(epubEntry); if (!pushed) { sendJsonResponse(res, ERROR.internalErr, 500); return; } sendJsonResponse( res, { error: null, message: `successfully published a book of id ${epubEntry.id}`, data: { id: epubEntry.id, }, }, 201 ); } else if (req.method === "DELETE") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let body: Buffer; body = await parseSimplePostData(req); let data: any; try { data = JSON.parse(body.toString()); } catch { sendJsonResponse(res, ERROR.invalidJSONData, 400); return; } if (!data.bookid) { sendJsonResponse(res, ERROR.badRequest, 400); return; } let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id); if (!bookDeleted) { sendJsonResponse(res, { error: "unable-to-delete-book", message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`, status: 404 }, 404); return; } sendJsonResponse(res, { error: null, message: `successfully deleted book of id ${data.bookid}`, status: 200, data: { id: data.bookid, } }, 200); } else { sendJsonResponse(res, ERROR.methodNotAllowed, 405); } } finally { await BOOK_DB.close(); } }
src/routes/Books.ts
Aadv1k-quillia-52c5b34
[ { "filename": "src/routes/Issue.ts", "retrieved_chunk": " res.on(\"end\", () => resolve(data));\n res.on(\"error\", (error) => reject(error));\n });\n });\n let epubBuffer = Buffer.concat(response);\n sendEpubResponse(res, epubBuffer);\n return;\n } else {\n let userIssues = await ISSUE_DB.getIssues(parsedAuthToken.id);\n if (!userIssues) {", "score": 0.7928486466407776 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " let targetBook = await BOOK_DB.getBook(requestedBook);\n if (!targetBook) {\n sendJsonResponse(res, ERROR.resourceNotExists, 404);\n return;\n }\n let epubResourcePath = targetBook.path;\n const response: Array<Buffer> = await new Promise((resolve, reject) => {\n https.get(epubResourcePath, (res) => {\n let data: Array<Buffer> = [];\n res.on(\"data\", (d: Buffer) => data.push(d));", "score": 0.7561554908752441 }, { "filename": "src/models/Bucket.ts", "retrieved_chunk": " let p = path.join(this.bucketPath, name);\n console.log(p);\n fs.writeFileSync(p, buffer);\n return p;\n }\n let response = new Promise((resolve, reject) => {\n const writeStream = this.bucket.uploader.upload_stream({\n public_id: name,\n resource_type: \"raw\",\n format: name.split('.').pop() // ideally \"unsafe\" files should not reach this point", "score": 0.7520655393600464 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " id: foundIssue.id,\n bookid: foundIssue.bookid,\n },\n },\n 409\n );\n return;\n }\n let issueid = uuid();\n let issueEntry: Issue = {", "score": 0.7487258911132812 }, { "filename": "src/common/utils.ts", "retrieved_chunk": " })\n })\n let buffer = Buffer.concat(buffArr);\n return buffer;\n } catch (err) {\n console.error(err);\n return null;\n }\n} \nexport function sendHtmlResponse(res: ServerResponse, html: string, status: number = 200) {", "score": 0.7171758413314819 } ]
typescript
let epubFilePermalink = await BUCKET.pushBufferWithName( epubBuffer, `${epubSignature}.epub` );
import BookModel from "../models/BookModel"; import Bucket from "../models/Bucket"; import Token from "../lib/GenerateToken"; import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const"; import { TokStatus, Book } from "../common/types"; import { sendJsonResponse, parseSimplePostData, md5, uuid, } from "../common/utils"; import filetype from "file-type-cjs"; import fs from "node:fs"; import EPub from "epub"; import os from "node:os"; import path from "node:path"; import crypto from "node:crypto"; import { exec } from "node:child_process"; import http from "node:http"; async function getEpubCoverFromEpubFile_UNIX( epubFilepath: string ): Promise<[Buffer, string] | null> { let randomString = crypto.randomBytes(16).toString("hex"); let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`); fs.mkdirSync(tempDir); let unzipCMD = `unzip -q ${epubFilepath} -d ${tempDir}`; let unzipCMDExec = new Promise((resolve, reject) => { exec(unzipCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); try { await unzipCMDExec; } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`; let findCMDExec: Promise<string> = new Promise((resolve, reject) => { exec(findCMD, (err: any, stdout: any, stderr: any) => { if (err) reject(err); resolve(stdout); }); }); let selectedFilePath: string; try { selectedFilePath = await findCMDExec; selectedFilePath = selectedFilePath.trim(); } catch (err) { console.error(err); fs.rmSync(tempDir, { recursive: true }); // we r good boys! return null; } let ret: [Buffer, string] = [ Buffer.from(fs.readFileSync(selectedFilePath)), selectedFilePath, ]; fs.rmSync(tempDir, { recursive: true }); // we r good boys! 
return ret; } export default async function ( req: http.IncomingMessage, res: http.ServerResponse ) { const BOOK_DB = new BookModel(); const BUCKET = new Bucket(); await BOOK_DB.init(); await BUCKET.init(); try { if (req.method === "GET") { try { let userBooks = await BOOK_DB.getBooks(); userBooks = userBooks.map((e) => { delete e.path; return e; }); sendJsonResponse(res, userBooks, 200); } catch (error) { console.error(error); sendJsonResponse(res, ERROR.internalErr); } } else if (req.method === "POST") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let epubBuffer: Buffer; epubBuffer = await parseSimplePostData(req); let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6); let bufferMime = await filetype.fromBuffer(epubBuffer); if (bufferMime.mime != "application/epub+zip") { sendJsonResponse(res, ERROR.invalidMimeForResource, 415); return; } if (epubSizeInMB > MAX_EPUB_SIZE_MB) { sendJsonResponse(res, ERROR.fileTooLarge, 400); return; } let randomString = crypto.randomBytes(16).toString("hex"); const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`); fs.writeFileSync(tempEpubFilePath, epubBuffer); const epub: any = await new Promise((resolve, reject) => { const epub = new EPub(tempEpubFilePath); epub.on("end", () => resolve(epub)); epub.on("error", reject); epub.parse(); }); let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath); console.log(epubCoverBuffer); let epubSignature = md5(epubBuffer.toString("hex")); let foundBook = await BOOK_DB.getBook("", epubSignature); if (foundBook) { sendJsonResponse( res, { ...ERROR.resourceExists, data: { id: foundBook.id, }, }, 409 ); return; } let epubFilePermalink = await BUCKET.pushBufferWithName( epubBuffer, `${epubSignature}.epub` ); let epubCoverPermalink = null; if (epubCoverBuffer) { epubCoverPermalink = await BUCKET.pushBufferWithName( epubCoverBuffer[0], `${epubSignature}.${epubCoverBuffer[1].split(".").pop()}` ); } let epubID = uuid(); let epubEntry: Book = { id: epubID, userid: parsedAuthToken.id, title: epub.metadata?.title ?? epubID.split("-").pop(), author: epub.metadata?.creator ?? parsedAuthToken.email, path: epubFilePermalink, signature: epubSignature, cover: epubCoverPermalink, };
const pushed = await BOOK_DB.pushBook(epubEntry);
if (!pushed) { sendJsonResponse(res, ERROR.internalErr, 500); return; } sendJsonResponse( res, { error: null, message: `successfully published a book of id ${epubEntry.id}`, data: { id: epubEntry.id, }, }, 201 ); } else if (req.method === "DELETE") { const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop(); if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const parsedAuthToken: any = token.UNSAFE_parse(authToken); let body: Buffer; body = await parseSimplePostData(req); let data: any; try { data = JSON.parse(body.toString()); } catch { sendJsonResponse(res, ERROR.invalidJSONData, 400); return; } if (!data.bookid) { sendJsonResponse(res, ERROR.badRequest, 400); return; } let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id); if (!bookDeleted) { sendJsonResponse(res, { error: "unable-to-delete-book", message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`, status: 404 }, 404); return; } sendJsonResponse(res, { error: null, message: `successfully deleted book of id ${data.bookid}`, status: 200, data: { id: data.bookid, } }, 200); } else { sendJsonResponse(res, ERROR.methodNotAllowed, 405); } } finally { await BOOK_DB.close(); } }
src/routes/Books.ts
Aadv1k-quillia-52c5b34
[ { "filename": "src/models/BookModel.ts", "retrieved_chunk": " [book.id, book.userid, book.author, book.title, book.path, book?.cover ?? \"\", book.signature]\n )\n return book;\n } catch (error) {\n console.error(error); \n return null;\n }\n }\n async deleteBook(bookid: string, userid?: string) {\n try {", "score": 0.7317772507667542 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " id: issueid,\n borrowerid: parsedAuthToken.id,\n lenderid: foundLender.id,\n bookid: foundBook.id,\n };\n const pushed = await ISSUE_DB.pushIssue(issueEntry);\n if (!pushed) {\n sendJsonResponse(res, ERROR.internalErr, 500);\n return;\n }", "score": 0.7157102227210999 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " sendJsonResponse(\n res,\n {\n error: null,\n message: `successfully created a new issue of id ${issueEntry.id}`,\n data: {\n id: pushed.id,\n borrower: pushed.borrowerid,\n lender: pushed.lenderid,\n book: foundBook.title,", "score": 0.7124941349029541 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " let targetBook = await BOOK_DB.getBook(requestedBook);\n if (!targetBook) {\n sendJsonResponse(res, ERROR.resourceNotExists, 404);\n return;\n }\n let epubResourcePath = targetBook.path;\n const response: Array<Buffer> = await new Promise((resolve, reject) => {\n https.get(epubResourcePath, (res) => {\n let data: Array<Buffer> = [];\n res.on(\"data\", (d: Buffer) => data.push(d));", "score": 0.6968203186988831 }, { "filename": "src/routes/Issue.ts", "retrieved_chunk": " return;\n }\n await ISSUE_DB.init();\n await BOOK_DB.init();\n await USER_DB.init();\n const parsedAuthToken: any = token.UNSAFE_parse(authToken);\n if (req.method === \"GET\") {\n let URLParams = req.url.split(\"/\").slice(3);\n let requestedBook = URLParams?.[0];\n if (requestedBook) {", "score": 0.6945744752883911 } ]
typescript
const pushed = await BOOK_DB.pushBook(epubEntry);
import IssueModel from "../models/IssueModel"; import BookModel from "../models/BookModel"; import UserModel from "../models/UserModel"; import Token from "../lib/GenerateToken"; import { ERROR } from "../common/const"; import { TokStatus, Issue } from "../common/types"; import { sendJsonResponse, sendEpubResponse, parseSimplePostData, uuid, getBufferFromRawURL, } from "../common/utils"; import http from "node:http"; import https from "node:https"; export default async function ( req: http.IncomingMessage, res: http.ServerResponse ) { const ISSUE_DB = new IssueModel(); const BOOK_DB = new BookModel(); const USER_DB = new UserModel(); const authorization = req.headers?.authorization; const authToken = authorization?.split(" ")?.pop()?.trim(); try { if (req.method === "OPTIONS") { sendJsonResponse(res, {}, 200); return; } if (!authorization || !authToken) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } const token = new Token(); const tokenStatus: TokStatus = token.verify(authToken); if ( tokenStatus === TokStatus.INVALID || tokenStatus === TokStatus.INVALID_SIG ) { sendJsonResponse(res, ERROR.unauthorized, 401); return; } await ISSUE_DB.init(); await BOOK_DB.init(); await USER_DB.init(); const parsedAuthToken: any = token.UNSAFE_parse(authToken); if (req.method === "GET") { let URLParams = req.url.split("/").slice(3); let requestedBook = URLParams?.[0]; if (requestedBook) { let targetBook = await BOOK_DB.getBook(requestedBook); if (!targetBook) { sendJsonResponse(res, ERROR.resourceNotExists, 404); return; } let epubResourcePath = targetBook.path; const response: Array<Buffer> = await new Promise((resolve, reject) => { https.get(epubResourcePath, (res) => { let data: Array<Buffer> = []; res.on("data", (d: Buffer) => data.push(d)); res.on("end", () => resolve(data)); res.on("error", (error) => reject(error)); }); }); let epubBuffer = Buffer.concat(response); sendEpubResponse(res, epubBuffer); return; } else {
let userIssues = await ISSUE_DB.getIssues(parsedAuthToken.id);
if (!userIssues) { sendJsonResponse(res, ERROR.resourceNotExists, 404); } else { sendJsonResponse(res, userIssues, 200); } } } else if (req.method === "POST") { if (req.headers?.["content-type"] != "application/json") { sendJsonResponse(res, ERROR.invalidMimeForResource, 415); return; } let issueData: Issue; try { let issuePostData = await parseSimplePostData(req); issueData = JSON.parse(issuePostData.toString()); } catch (error) { console.error(error); sendJsonResponse(res, ERROR.badRequest, 400); return; } if (!issueData.lenderid || !issueData.bookid) { sendJsonResponse(res, ERROR.badRequest, 400); return; } let foundLender = await USER_DB.getUserByID(issueData.lenderid); let foundBook = await BOOK_DB.getBook(issueData.bookid); if (!foundLender || !foundBook) { sendJsonResponse(res, ERROR.resourceNotExists, 404); return; } let foundIssue = await ISSUE_DB.getIssue( foundLender.id, foundBook.id, parsedAuthToken.id ); if (foundIssue) { sendJsonResponse( res, { ...ERROR.resourceExists, data: { id: foundIssue.id, bookid: foundIssue.bookid, }, }, 409 ); return; } let issueid = uuid(); let issueEntry: Issue = { id: issueid, borrowerid: parsedAuthToken.id, lenderid: foundLender.id, bookid: foundBook.id, }; const pushed = await ISSUE_DB.pushIssue(issueEntry); if (!pushed) { sendJsonResponse(res, ERROR.internalErr, 500); return; } sendJsonResponse( res, { error: null, message: `successfully created a new issue of id ${issueEntry.id}`, data: { id: pushed.id, borrower: pushed.borrowerid, lender: pushed.lenderid, book: foundBook.title, }, }, 201 ); } } finally { await ISSUE_DB.close(); await BOOK_DB.close(); await USER_DB.close(); } }
src/routes/Issue.ts
Aadv1k-quillia-52c5b34
[ { "filename": "src/common/utils.ts", "retrieved_chunk": " resolve([fields, files]);\n })\n })\n}\nexport function parseSimplePostData(req: http.IncomingMessage): Promise<Buffer> {\n return new Promise((resolve, reject) => {\n let data: Buffer[] = [];\n req.on(\"data\", (chunk: Buffer) => data.push(chunk))\n req.on(\"end\", () => { \n const buf = Buffer.concat(data);", "score": 0.816415548324585 }, { "filename": "src/models/Bucket.ts", "retrieved_chunk": " }, (error: any, result: any) => {\n if (error) reject(error);\n resolve(result);\n })\n stream.pipe(writeStream);\n })\n try {\n let data: any = await response;\n return data.secure_url;\n } catch (error) {", "score": 0.8140221834182739 }, { "filename": "src/common/utils.ts", "retrieved_chunk": "export async function getBufferFromRawURL(resourceUrl: string): Promise<Buffer | null> {\n let url = new URL(resourceUrl);\n try {\n let buffArr: Buffer[] = await new Promise((resolve, reject) => {\n let func = url.protocol === \"https:\" ? https : http;\n func.get(url, (res) => {\n let data: Buffer[] = [];\n res.on(\"data\", (d: Buffer) => data.push(d))\n res.on(\"error\", reject)\n res.on(\"end\", () => resolve(data))", "score": 0.7989227771759033 }, { "filename": "src/common/utils.ts", "retrieved_chunk": " res.writeHead(status, {\n \"Content-type\": \"text/html\",\n })\n res.write(html, \"utf-8\");\n}\nexport function parsePostData(req: http.IncomingMessage): Promise<Array<object>> {\n let form = new IncomingForm({ multiples: false });\n return new Promise((resolve, reject) => {\n form.parse(req, (error, fields: Fields, files: Files) => {\n if (error) reject(error);", "score": 0.7956924438476562 }, { "filename": "src/common/utils.ts", "retrieved_chunk": " })\n })\n let buffer = Buffer.concat(buffArr);\n return buffer;\n } catch (err) {\n console.error(err);\n return null;\n }\n} \nexport function sendHtmlResponse(res: ServerResponse, html: string, status: number = 200) {", "score": 0.7852454781532288 } ]
typescript
let userIssues = await ISSUE_DB.getIssues(parsedAuthToken.id);