prefix
stringlengths 82
32.6k
| middle
stringlengths 5
470
| suffix
stringlengths 0
81.2k
| file_path
stringlengths 6
168
| repo_name
stringlengths 16
77
| context
listlengths 5
5
| lang
stringclasses 4
values | ground_truth
stringlengths 5
470
|
---|---|---|---|---|---|---|---|
import {
isContextNode,
type ContextNode,
type Edge,
type Node,
type Style,
} from "../types";
import {
add,
difference,
multiply,
normalize,
rotate,
splitName,
} from "../utils";
import { SvgGraphics } from "./SvgGraphics";
import { Graphics, Path, Renderable, SvgAttrs } from "./types";
const CTX_STROKE = "#aaaaaa";
const NOTE_STROKE = "#555555";
const ARROW_SIZE = 1.5;
const pickFontSize = (words: string[], w: number) => {
const max = words
.map((word) => word.length)
.sort((a, b) => b - a)
.at(0)!;
return Math.floor(Math.min(Math.max(Math.ceil(w / max), 8), 24));
};
const sizeText = (
text: string[],
w: number,
h: number
): { lines: string[]; fontSize: number } => {
let fontSize = pickFontSize(text, w);
while (fontSize > 5) {
const maxWidth = Math.ceil(w / fontSize) - 1;
const maxHeight = Math.floor(h / fontSize) - 1;
const lines: string[] = [];
let line = text[0];
let n = 1;
while (n < text.length) {
const word = text[n++];
if (line.length + word.length >= maxWidth) {
lines.push(line);
line = word;
} else line = line.concat(line.length ? " " : "", word);
}
lines.push(line);
if (n === text.length && lines.length <= maxHeight)
return {
lines,
fontSize,
};
fontSize--;
}
return {
lines: [text.join(" ")],
fontSize,
};
};
const renderText = (
text: string[],
w: number,
h: number,
g: Graphics,
options: {
fit: boolean;
x?: number;
y?: number;
w?: number;
h?: number;
fontSize?: number;
} = { fit: true }
) => {
const width = options.w || w || 0;
const height = options.h || h || 0;
const { lines, fontSize } = options.fit
? sizeText(text, width, height)
: {
lines: text,
fontSize: options.fontSize || 12,
};
g.attr("font-size", fontSize + "pt");
const x = options.x || Math.floor(width / 2);
const y = options.y || Math.floor(height / 2);
const m = Math.floor(lines.length / 2);
const o = lines.length % 2 ? 0.3 : 0.9;
lines.forEach((line, i) => {
g.text(line, x, y, {
fill: NOTE_STROKE,
dy: `${((i - m) * 1.2 + o).toFixed(2)}em`,
});
});
};
const getPath = (edge: Edge): Path[] => {
if (edge.path) {
const path = edge.path!.slice(1, -1);
const endDir = normalize(difference(path[path.length - 2], path.at(-1)!));
const end = path.length - 1;
const copy = path.map((p) => ({ x: p.x, y: p.y }));
copy[end] = add(
copy[end],
multiply(endDir, ARROW_SIZE * (edge.arrow ? 5 : 0))
);
return copy;
}
const x1 = edge.source.x! + edge.source.width! / 2;
const x2 = edge.target.x! - edge.target.width! / 2;
const y1 = edge.source.y!;
const y2 = edge.target.y!;
if (y1 === y2) return [{ x: x1, y: y1 }, { dx: x2 - x1 }];
const dx = Math.floor((x2 - x1) / 2);
const dy = Math.floor(y2 - y1);
return [{ x: x1, y: y1 }, { dx }, { dy }, { dx }];
};
const renderEdge = (edge: Edge, g: Graphics) => {
const attrs: SvgAttrs = {
fill: "none",
stroke: edge.arrow ? edge.color : edge.target.color,
};
edge.arrow && (attrs["stroke-width"] = 3);
g.path(getPath(edge), false, { ...attrs });
if (edge.arrow) {
const end = edge.path![edge.path!.length - 2];
const path = edge.path!.slice(1, -1);
const dir = normalize(difference(path[path.length - 2], path.at(-1)!));
const x = (s: number) => add(end, multiply(dir, s * ARROW_SIZE));
const y = (s: number) => multiply(rotate(dir), s * ARROW_SIZE);
g.path([add(x(10), y(4)), x(5), add(x(10), y(-4)), end], true, {
...attrs,
fill: edge.color,
});
}
};
const renderSimpleRef = (
node: Node,
x: number,
y: number,
w: number,
h: number,
g: Graphics
) => {
g.group("", "").attr("fill", node.color);
g.rect(x, y, w, h);
renderText(splitName(node.name), w, h, g, {
fit: true,
x: x + w / 2,
y: y + h / 2,
w,
h,
});
g.ungroup();
};
const renderRef = (
node: Node,
x: number,
y: number,
w: number,
h: number,
g: Graphics
) => {
renderSimpleRef(node, x, y, w, h, g);
const hw = Math.ceil(w / 2);
const hh = Math.ceil(h / 2);
node.refs &&
[...node.refs].forEach((target, i) =>
renderSimpleRef(target, x - hw + 4, y + i * (hh + 2) - 4, hw, hh, g)
);
};
const renderMultilineRef = (
targets: Node[],
x: number,
y: number,
w: number,
h: number,
g: Graphics
) => {
const text = targets.map((target) => `- ${splitName(target.name).join(" ")}`);
g.group("", "")
.attr("fill", targets[0].color)
.attr("text-align", "left")
.attr("text-anchor", "start");
g.rect(x, y, w, h);
renderText(text, w, h, g, {
fit: true,
x: x + 4,
y: y + h / 2,
w,
h,
});
g.ungroup();
};
const renderCommandRefs = (
node: Node,
x: number,
y: number,
w: number,
h: number,
g: Graphics
) => {
const targets = [...node.refs!];
const th = Math.floor(h / targets.length);
targets.forEach((target, i) =>
renderRef(target, x, y + i * (th + 2), w, th, g)
);
};
const renderRefs = (node: Node, g: Graphics, style: Style) => {
if (node.refs && node.visual !== "actor") {
const x = Math.floor(node.x! - node.width! / 2 - style.scale * 0.2);
const y = Math.floor(node.y! + node.height! * 0.4);
const w = Math.floor(style.scale);
const h = Math.floor(style.scale / 2);
node.refs?.size > 1
? node.visual === "command"
? renderCommandRefs(node, x, y, w, h, g)
: renderMultilineRef([...node.refs], x, y, w, h, g)
: renderRef([...node.refs][0], x, y, w, h, g);
}
};
const context: Renderable = (ctx: Node, g: Graphics, style: Style) => {
if (isContextNode(ctx)) {
if (ctx.name) {
const words = splitName(ctx.name);
g.text(words.join(" "), 0, 0, {
fill: CTX_STROKE,
stroke: CTX_STROKE,
dy: -style.fontSize,
});
g.rect(0, 0, ctx.width!, ctx.height!, { rx: 25, ry: 25 });
}
g.group(`n${ctx.index}`, "", { dx: style.padding, dy: style.padding });
if (ctx.name)
g.attr("text-align", "center")
.attr("text-anchor", "middle")
.attr("stroke", NOTE_STROKE);
ctx.edges.forEach(
(e) =>
e.color &&
renderEdge({ ...e, source: ctx.nodes.get(e.source.name)! }, g)
);
ctx.nodes.forEach((n) => n.color && renderNode(n, g, style));
ctx.nodes.forEach((n) => n.refs && renderRefs(n, g, style));
g.ungroup();
}
};
const note: Renderable = (node: Node, g: Graphics) => {
g.attr("fill", node.color!);
g.rect(0, 0, node.width!, node.height!);
if (node.rels)
g.attr(
"class",
node.visual.concat(" ", [...node.rels].map((i) => `n${i}`).join(" "))
);
renderText(splitName(node.name), node.width!, node.height!, g);
const schema = node.ctx.schemas.get(node.name);
schema &&
g.text(`{${schema.size}}`, node.width! - 6, 6, {
"font-size": "6pt",
fill: NOTE_STROKE,
});
};
const renderNode = (node: Node, g: Graphics, style: Style) => {
const dx = Math.floor(node.x! - node.width! / 2);
const dy = Math.floor(node.y! - node.height! / 2);
const render =
node.visual === "context"
? context
: node.visual !== "actor"
? note
: undefined; // don't render actors as nodes
if (render) {
g.group(`n${node.index}`, node.name, { class: node.visual, dx, dy });
render(node, g, style);
g.ungroup();
}
};
export const render = (root: ContextNode, style: Style): string => {
const | g = new SvgGraphics({ |
fill: style.fill,
"font-family": style.font,
"font-size": style.fontSize + "pt",
"text-align": "left",
stroke: style.stroke,
"stroke-width": 1,
});
context(root, g, style);
return g.serialize();
};
| src/graphics/render.ts | Rotorsoft-esml-85e903b | [
{
"filename": "src/graphics/layout.ts",
"retrieved_chunk": "const rectangle: Layouter = (node: Node, style: Style) => {\n node.x = 0;\n node.y = 0;\n node.width = style.scale * 2;\n node.height = style.scale;\n};\n// don't render actors as nodes\nconst actor: Layouter = (node: Node) => {\n node.x = 0;\n node.y = 0;",
"score": 58.546774991964824
},
{
"filename": "src/canvas.ts",
"retrieved_chunk": " this.nodes.style.visibility = \"hidden\";\n nodes.map((node) => {\n const g = this.document.getElementById(\"n\" + node.index);\n if (g) {\n if (!g.classList.contains(\"context\")) {\n g.addEventListener(\"mouseenter\", handleMouseEnter);\n g.addEventListener(\"mouseleave\", handleMouseLeave);\n }\n // details\n const schema = node.ctx.schemas.get(node.name);",
"score": 51.25458101414943
},
{
"filename": "src/canvas.ts",
"retrieved_chunk": " if (node.description || schema) {\n const el = this.document.createElement(\"div\");\n el.id = `node-${g.id}`;\n el.innerHTML = this.renderNodeDetails(node, schema);\n this.nodes?.appendChild(el);\n }\n }\n });\n }\n public render(state: State): Error | undefined {",
"score": 46.64096113232882
},
{
"filename": "src/canvas.ts",
"retrieved_chunk": " const handleMouseEnter = (event: MouseEvent) => {\n const g = event.target as SVGGElement;\n if (fadable[g.id]) fade(g.id);\n const node = this.document.getElementById(\"node-\" + g.id);\n if (this.details && node) {\n this.details.innerHTML = node.innerHTML;\n this.details.style.visibility = \"visible\";\n }\n };\n const handleMouseLeave = () => {",
"score": 43.76024134035068
},
{
"filename": "src/graphics/SvgGraphics.ts",
"retrieved_chunk": " constructor(attrs: SvgAttrs) {\n this.root = this.current = new SvgElement(\"g\", {\n ...attrs,\n \"data-name\": \"root\",\n });\n }\n group(\n id: string,\n name: string,\n attrs?: { class?: string; dx?: number; dy?: number }",
"score": 32.79686828623738
}
] | typescript | g = new SvgGraphics({ |
import {
isContextNode,
type ContextNode,
type Edge,
type Node,
type Style,
} from "../types";
import {
add,
difference,
multiply,
normalize,
rotate,
splitName,
} from "../utils";
import { SvgGraphics } from "./SvgGraphics";
import { Graphics, Path, Renderable, SvgAttrs } from "./types";
const CTX_STROKE = "#aaaaaa";
const NOTE_STROKE = "#555555";
const ARROW_SIZE = 1.5;
const pickFontSize = (words: string[], w: number) => {
const max = words
.map((word) => word.length)
.sort((a, b) => b - a)
.at(0)!;
return Math.floor(Math.min(Math.max(Math.ceil(w / max), 8), 24));
};
const sizeText = (
text: string[],
w: number,
h: number
): { lines: string[]; fontSize: number } => {
let fontSize = pickFontSize(text, w);
while (fontSize > 5) {
const maxWidth = Math.ceil(w / fontSize) - 1;
const maxHeight = Math.floor(h / fontSize) - 1;
const lines: string[] = [];
let line = text[0];
let n = 1;
while (n < text.length) {
const word = text[n++];
if (line.length + word.length >= maxWidth) {
lines.push(line);
line = word;
} else line = line.concat(line.length ? " " : "", word);
}
lines.push(line);
if (n === text.length && lines.length <= maxHeight)
return {
lines,
fontSize,
};
fontSize--;
}
return {
lines: [text.join(" ")],
fontSize,
};
};
const renderText = (
text: string[],
w: number,
h: number,
g: Graphics,
options: {
fit: boolean;
x?: number;
y?: number;
w?: number;
h?: number;
fontSize?: number;
} = { fit: true }
) => {
const width = options.w || w || 0;
const height = options.h || h || 0;
const { lines, fontSize } = options.fit
? sizeText(text, width, height)
: {
lines: text,
fontSize: options.fontSize || 12,
};
g.attr("font-size", fontSize + "pt");
const x = options.x || Math.floor(width / 2);
const y = options.y || Math.floor(height / 2);
const m = Math.floor(lines.length / 2);
const o = lines.length % 2 ? 0.3 : 0.9;
lines.forEach((line, i) => {
g.text(line, x, y, {
fill: NOTE_STROKE,
dy: `${((i - m) * 1.2 + o).toFixed(2)}em`,
});
});
};
| const getPath = (edge: Edge): Path[] => { |
if (edge.path) {
const path = edge.path!.slice(1, -1);
const endDir = normalize(difference(path[path.length - 2], path.at(-1)!));
const end = path.length - 1;
const copy = path.map((p) => ({ x: p.x, y: p.y }));
copy[end] = add(
copy[end],
multiply(endDir, ARROW_SIZE * (edge.arrow ? 5 : 0))
);
return copy;
}
const x1 = edge.source.x! + edge.source.width! / 2;
const x2 = edge.target.x! - edge.target.width! / 2;
const y1 = edge.source.y!;
const y2 = edge.target.y!;
if (y1 === y2) return [{ x: x1, y: y1 }, { dx: x2 - x1 }];
const dx = Math.floor((x2 - x1) / 2);
const dy = Math.floor(y2 - y1);
return [{ x: x1, y: y1 }, { dx }, { dy }, { dx }];
};
const renderEdge = (edge: Edge, g: Graphics) => {
const attrs: SvgAttrs = {
fill: "none",
stroke: edge.arrow ? edge.color : edge.target.color,
};
edge.arrow && (attrs["stroke-width"] = 3);
g.path(getPath(edge), false, { ...attrs });
if (edge.arrow) {
const end = edge.path![edge.path!.length - 2];
const path = edge.path!.slice(1, -1);
const dir = normalize(difference(path[path.length - 2], path.at(-1)!));
const x = (s: number) => add(end, multiply(dir, s * ARROW_SIZE));
const y = (s: number) => multiply(rotate(dir), s * ARROW_SIZE);
g.path([add(x(10), y(4)), x(5), add(x(10), y(-4)), end], true, {
...attrs,
fill: edge.color,
});
}
};
const renderSimpleRef = (
node: Node,
x: number,
y: number,
w: number,
h: number,
g: Graphics
) => {
g.group("", "").attr("fill", node.color);
g.rect(x, y, w, h);
renderText(splitName(node.name), w, h, g, {
fit: true,
x: x + w / 2,
y: y + h / 2,
w,
h,
});
g.ungroup();
};
const renderRef = (
node: Node,
x: number,
y: number,
w: number,
h: number,
g: Graphics
) => {
renderSimpleRef(node, x, y, w, h, g);
const hw = Math.ceil(w / 2);
const hh = Math.ceil(h / 2);
node.refs &&
[...node.refs].forEach((target, i) =>
renderSimpleRef(target, x - hw + 4, y + i * (hh + 2) - 4, hw, hh, g)
);
};
const renderMultilineRef = (
targets: Node[],
x: number,
y: number,
w: number,
h: number,
g: Graphics
) => {
const text = targets.map((target) => `- ${splitName(target.name).join(" ")}`);
g.group("", "")
.attr("fill", targets[0].color)
.attr("text-align", "left")
.attr("text-anchor", "start");
g.rect(x, y, w, h);
renderText(text, w, h, g, {
fit: true,
x: x + 4,
y: y + h / 2,
w,
h,
});
g.ungroup();
};
const renderCommandRefs = (
node: Node,
x: number,
y: number,
w: number,
h: number,
g: Graphics
) => {
const targets = [...node.refs!];
const th = Math.floor(h / targets.length);
targets.forEach((target, i) =>
renderRef(target, x, y + i * (th + 2), w, th, g)
);
};
const renderRefs = (node: Node, g: Graphics, style: Style) => {
if (node.refs && node.visual !== "actor") {
const x = Math.floor(node.x! - node.width! / 2 - style.scale * 0.2);
const y = Math.floor(node.y! + node.height! * 0.4);
const w = Math.floor(style.scale);
const h = Math.floor(style.scale / 2);
node.refs?.size > 1
? node.visual === "command"
? renderCommandRefs(node, x, y, w, h, g)
: renderMultilineRef([...node.refs], x, y, w, h, g)
: renderRef([...node.refs][0], x, y, w, h, g);
}
};
const context: Renderable = (ctx: Node, g: Graphics, style: Style) => {
if (isContextNode(ctx)) {
if (ctx.name) {
const words = splitName(ctx.name);
g.text(words.join(" "), 0, 0, {
fill: CTX_STROKE,
stroke: CTX_STROKE,
dy: -style.fontSize,
});
g.rect(0, 0, ctx.width!, ctx.height!, { rx: 25, ry: 25 });
}
g.group(`n${ctx.index}`, "", { dx: style.padding, dy: style.padding });
if (ctx.name)
g.attr("text-align", "center")
.attr("text-anchor", "middle")
.attr("stroke", NOTE_STROKE);
ctx.edges.forEach(
(e) =>
e.color &&
renderEdge({ ...e, source: ctx.nodes.get(e.source.name)! }, g)
);
ctx.nodes.forEach((n) => n.color && renderNode(n, g, style));
ctx.nodes.forEach((n) => n.refs && renderRefs(n, g, style));
g.ungroup();
}
};
const note: Renderable = (node: Node, g: Graphics) => {
g.attr("fill", node.color!);
g.rect(0, 0, node.width!, node.height!);
if (node.rels)
g.attr(
"class",
node.visual.concat(" ", [...node.rels].map((i) => `n${i}`).join(" "))
);
renderText(splitName(node.name), node.width!, node.height!, g);
const schema = node.ctx.schemas.get(node.name);
schema &&
g.text(`{${schema.size}}`, node.width! - 6, 6, {
"font-size": "6pt",
fill: NOTE_STROKE,
});
};
const renderNode = (node: Node, g: Graphics, style: Style) => {
const dx = Math.floor(node.x! - node.width! / 2);
const dy = Math.floor(node.y! - node.height! / 2);
const render =
node.visual === "context"
? context
: node.visual !== "actor"
? note
: undefined; // don't render actors as nodes
if (render) {
g.group(`n${node.index}`, node.name, { class: node.visual, dx, dy });
render(node, g, style);
g.ungroup();
}
};
export const render = (root: ContextNode, style: Style): string => {
const g = new SvgGraphics({
fill: style.fill,
"font-family": style.font,
"font-size": style.fontSize + "pt",
"text-align": "left",
stroke: style.stroke,
"stroke-width": 1,
});
context(root, g, style);
return g.serialize();
};
| src/graphics/render.ts | Rotorsoft-esml-85e903b | [
{
"filename": "src/graphics/SvgGraphics.ts",
"retrieved_chunk": " p.x && p.y\n ? (i ? \"L\" : \"M\") + p.x.toFixed(0) + \" \" + p.y.toFixed(0)\n : p.dx\n ? \"h\" + p.dx.toFixed(0)\n : p.dy\n ? \"v\" + p.dy.toFixed(0)\n : \"\"\n )\n .join(\" \")\n .concat(close ? \" Z\" : \"\");",
"score": 32.11112315421203
},
{
"filename": "src/canvas.ts",
"retrieved_chunk": " this.fitZoom(this.zoom + z);\n this.transform();\n }\n private fitZoom(z: number) {\n this.zoom = Math.round(Math.min(Math.max(0.1, z), 3) * 100) / 100;\n }\n private transform(dx = 0, dy = 0) {\n const g = this.svg.children[0];\n if (g) {\n this.x = Math.floor(",
"score": 30.899693958690317
},
{
"filename": "src/esml.ts",
"retrieved_chunk": " error.issues\n .map((i) => `${i.path.join(\".\")}: ${i.message}`)\n .join(\"\\n\")\n ),\n };\n if (error instanceof Error) {\n const message = error.stack!.split(\"\\n\").slice(0, 2).join(\" \");\n return { error: Error(message) };\n }\n return { error: Error(error) };",
"score": 29.16312602357492
},
{
"filename": "src/canvas.ts",
"retrieved_chunk": " const vh = Math.min(this.container.clientHeight, window.innerHeight);\n if (this.w && this.h && vw && vh) {\n // avoid NaN\n this.fitZoom(Math.min(vw / this.w, vh / this.h));\n this.x = Math.floor((vw - this.w * this.zoom) / 2);\n this.y = Math.floor((vh - this.h * this.zoom) / 2);\n this.transform();\n }\n }\n private zoomTo(z: number) {",
"score": 26.522644486837684
},
{
"filename": "src/graphics/layout.ts",
"retrieved_chunk": "const rectangle: Layouter = (node: Node, style: Style) => {\n node.x = 0;\n node.y = 0;\n node.width = style.scale * 2;\n node.height = style.scale;\n};\n// don't render actors as nodes\nconst actor: Layouter = (node: Node) => {\n node.x = 0;\n node.y = 0;",
"score": 25.017026695999057
}
] | typescript | const getPath = (edge: Edge): Path[] => { |
import {
FormattingNodeConstructorParameters,
InlineToolName,
InlineToolData
} from './types';
import { ChildNode, InlineFragment, InlineNode, InlineNodeSerialized, ParentNode } from '../interfaces';
export * from './types';
/**
* We need to extend FormattingNode interface with ChildNode and ParentNode ones to use the methods from mixins
*/
export interface FormattingNode extends ChildNode, ParentNode {}
/**
* FormattingNode class represents a node in a tree-like structure, used to store and manipulate formatted text content
*/
@ParentNode
@ChildNode
export class FormattingNode implements InlineNode {
/**
* Private field representing the name of the formatting tool applied to the content
*/
#tool: InlineToolName;
/**
* Any additional data associated with the formatting tool
*/
#data?: InlineToolData;
/**
* Constructor for FormattingNode class.
*
* @param args - FormattingNode constructor arguments.
* @param args.tool - The name of the formatting tool applied to the content.
* @param args.data - Any additional data associated with the formatting.
*/
constructor({ tool, data }: FormattingNodeConstructorParameters) {
this.#tool = tool;
this.#data = data;
}
/**
* Returns text value length of current node (including subtree)
*/
public get length(): number {
| return this.children.reduce((sum, child) => sum + child.length, 0); |
}
/**
* Returns serialized value of the node: text and formatting fragments
*/
public get serialized(): InlineNodeSerialized {
return {
text: this.getText(),
fragments: this.getFragments(),
};
}
/**
* Inserts text to the specified index, by default appends text to the end of the current value
*
* @param text - text to insert
* @param [index] - char index where to insert text
*/
public insertText(text: string, index = this.length): void {
const [child, offset] = this.#findChildByIndex(index);
child?.insertText(text, index - offset);
}
/**
* Removes text form the specified range
*
* @param [start] - start char index of the range, by default 0
* @param [end] - end char index of the range, by default length of the text value
* @returns {string} removed text
*/
public removeText(start = 0, end = this.length): string {
const result = this.#reduceChildrenInRange(
start,
end,
(acc, child, childStart, childEnd) => {
return acc + child.removeText(childStart, childEnd);
},
''
);
if (this.length === 0) {
this.remove();
}
return result;
}
/**
* Returns text from the specified range
*
* @param [start] - start char index of the range, by default 0
* @param [end] - end char index of the range, by default length of the text value
*/
public getText(start = 0, end = this.length): string {
return this.#reduceChildrenInRange(
start,
end,
(acc, child, childStart, childEnd) => {
return acc + child.getText(childStart, childEnd);
},
''
);
}
/**
* Returns inline fragments for subtree including current node from the specified range
*
* @param [start] - start char index of the range, by default 0
* @param [end] - end char index of the range, by default length of the text value
*/
public getFragments(start = 0, end = this.length): InlineFragment[] {
return this.#reduceChildrenInRange<InlineFragment[]>(
start,
end,
(acc, child, childStart, childEnd) => {
/**
* If child is not a FormattingNode, it doesn't include any fragments. So we skip it.
*/
if (!(child instanceof FormattingNode)) {
return acc;
}
acc.push(...child.getFragments(childStart, childEnd));
return acc;
},
[ {
tool: this.#tool,
data: this.#data,
range: [start, end],
} ]
);
}
/**
* Splits current node by the specified index
*
* @param index - char index where to split the node
* @returns {FormattingNode | null} new node
*/
public split(index: number): FormattingNode | null {
if (index === 0 || index === this.length) {
return null;
}
const newNode = new FormattingNode({
tool: this.#tool,
data: this.#data,
});
const [child, offset] = this.#findChildByIndex(index);
if (!child) {
return null;
}
// Have to save length as it is changed after split
const childLength = child.length;
const splitNode = child.split(index - offset);
let midNodeIndex = this.children.indexOf(child);
/**
* If node is split or if node is not split but index equals to child length, we should split children from the next node
*/
if (splitNode || (index - offset === childLength)) {
midNodeIndex += 1;
}
newNode.append(...this.children.slice(midNodeIndex));
this.parent?.insertAfter(this, newNode);
return newNode;
}
/**
* Applies formatting to the text with specified inline tool in the specified range
*
* @param tool - name of inline tool to apply
* @param start - char start index of the range
* @param end - char end index of the range
* @param [data] - inline tool data if applicable
*/
public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {
/**
* In case current tool is the same as new one, do nothing
*/
if (tool === this.#tool) {
return [];
}
return this.#reduceChildrenInRange<InlineNode[]>(
start,
end,
(acc, child, childStart, childEnd) => {
acc.push(...child.format(tool, childStart, childEnd, data));
return acc;
},
[]
);
}
/**
* Iterates through children in range and calls callback for each
*
* @param start - range start char index
* @param end - range end char index
* @param callback - callback to apply on children
* @param initialValue - initial accumulator value
* @private
*/
#reduceChildrenInRange<Acc>(
start: number,
end: number,
callback: (acc: Acc, child: InlineNode, start: number, end: number) => Acc,
initialValue: Acc
): Acc {
let result = initialValue;
for (const child of this.children) {
if (start < child.length && end > 0 && start < end) {
result = callback(result, child, Math.max(start, 0), Math.min(child.length, end));
}
start -= child.length;
end -= child.length;
}
return result;
}
/**
* Returns child by passed text index
*
* @param index - char index
* @private
*/
#findChildByIndex(index: number): [child: InlineNode & ChildNode | null, offset: number] {
let totalLength = 0;
for (const child of this.children) {
if (index <= child.length + totalLength) {
return [child, totalLength];
}
totalLength += child.length;
}
return [null, totalLength];
}
}
| src/entities/FormattingNode/index.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/TextNode/index.ts",
"retrieved_chunk": " constructor({ value = '' }: TextNodeConstructorParameters = {}) {\n this.#value = value;\n }\n /**\n * Returns length of the text\n */\n public get length(): number {\n return this.#value.length;\n }\n /**",
"score": 37.00102700416433
},
{
"filename": "src/entities/EditorDocument/index.ts",
"retrieved_chunk": " * @param args.properties - The properties of the document.\n */\n constructor({ children, properties }: EditorDocumentConstructorParameters) {\n this.#children = children;\n this.#properties = properties;\n }\n /**\n * Returns count of child BlockNodes of the EditorDocument.\n */\n public get length(): number {",
"score": 29.76167420623889
},
{
"filename": "src/entities/TextNode/index.ts",
"retrieved_chunk": " public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {\n this.#validateIndex(start);\n this.#validateIndex(end);\n const formattingNode = new FormattingNode({\n tool,\n data,\n });\n const fragments: ChildNode[] = [];\n /**\n * If start index is greater than 0, we need extract part of the text before the start index",
"score": 29.38928778991287
},
{
"filename": "src/entities/interfaces/ParentNode.ts",
"retrieved_chunk": " this.#children = children;\n this.children.forEach(child => child.appendTo(this));\n }\n /**\n * Returns node's children\n */\n public get children(): ChildNode[] {\n return this.#children;\n }\n /**",
"score": 27.602935231395485
},
{
"filename": "src/entities/BlockTune/index.ts",
"retrieved_chunk": " */\n public update(key: string, value: unknown): void {\n this.#data[key] = value;\n }\n /**\n * Returns serialized version of the BlockTune.\n */\n public get serialized(): BlockTuneSerialized {\n return {\n name: this.#name,",
"score": 24.90288097171081
}
] | typescript | return this.children.reduce((sum, child) => sum + child.length, 0); |
import { createBlockTuneName } from './index';
import { createBlockTuneMock } from '../../utils/mocks/createBlockTuneMock';
describe('BlockTune', () => {
describe('.update()', () => {
it('should add field to data object by key if it doesn\'t exist', () => {
// Arrange
const blockTune = createBlockTuneMock({
data: {},
});
// Act
blockTune.update('align', 'left');
// Assert
expect(blockTune.serialized.data).toEqual({
align: 'left',
});
});
it('should update field in data object by key', () => {
// Arrange
const blockTune = createBlockTuneMock({
data: {
align: 'center',
},
});
// Act
blockTune.update('align', 'right');
// Assert
expect(blockTune.serialized.data).toEqual({
align: 'right',
});
});
});
describe('.serialized', () => {
it('should return serialized version of the BlockTune', () => {
// Arrange
const tune = createBlockTuneMock({
name: | createBlockTuneName('styling'),
data: { |
background: 'transparent',
},
});
// Act
const tuneSerialized = tune.serialized;
// Assert
expect(tuneSerialized).toEqual(
{
name: 'styling',
data: {
background: 'transparent',
},
}
);
});
});
});
| src/entities/BlockTune/BlockTune.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/utils/mocks/createBlockTuneMock.ts",
"retrieved_chunk": "import { BlockTune, BlockTuneName, createBlockTuneName } from '../../entities/BlockTune';\n/**\n * Creates a BlockTune instance with the given name and data.\n *\n * @param args - BlockTune constructor arguments.\n * @param args.name - The name of the tune.\n * @param args.data - Any additional data associated with the tune.\n */\nexport function createBlockTuneMock({ name, data }: {\n name?: BlockTuneName,",
"score": 21.58998922556795
},
{
"filename": "src/entities/BlockTune/types/BlockTuneSerialized.ts",
"retrieved_chunk": "/**\n * BlockTuneSerialized represents a serialized version of a BlockTune.\n */\nexport interface BlockTuneSerialized {\n /**\n * The name of the tune.\n * Serialized as a string.\n */\n name: string;\n /**",
"score": 20.369368288301736
},
{
"filename": "src/entities/BlockTune/index.ts",
"retrieved_chunk": " */\n public update(key: string, value: unknown): void {\n this.#data[key] = value;\n }\n /**\n * Returns serialized version of the BlockTune.\n */\n public get serialized(): BlockTuneSerialized {\n return {\n name: this.#name,",
"score": 20.271177414801727
},
{
"filename": "src/utils/mocks/createBlockTuneMock.ts",
"retrieved_chunk": " data?: Record<string, unknown>,\n}): BlockTune {\n return new BlockTune(\n {\n name: name || createBlockTuneName('aligning'),\n data: data || {},\n }\n );\n}",
"score": 16.281937807922755
},
{
"filename": "src/entities/ValueNode/ValueNode.spec.ts",
"retrieved_chunk": " longitudeValueNode.update(updatedLongitude);\n // Assert\n expect(longitudeValueNode.serialized).toBe(updatedLongitude);\n });\n });\n describe('.serialized', () => {\n it('should return the serialized data associated with this value node', () => {\n // Arrange\n const longitude = 23.123;\n const longitudeValueNode = createValueNodeMock({",
"score": 15.921583309572647
}
] | typescript | createBlockTuneName('styling'),
data: { |
import { BlockNode } from './index';
import { createBlockNodeName, createDataKey } from './types';
import { BlockTune, createBlockTuneName } from '../BlockTune';
import { TextNode } from '../TextNode';
import { ValueNode } from '../ValueNode';
import type { EditorDocument } from '../EditorDocument';
import type { BlockTuneConstructorParameters } from '../BlockTune/types';
import type { TextNodeConstructorParameters } from '../TextNode';
import type { ValueNodeConstructorParameters } from '../ValueNode';
describe('BlockNode', () => {
describe('.serialized', () => {
beforeEach(() => {
jest.mock('../BlockTune', () => ({
BlockTune: jest.fn().mockImplementation(() => ({}) as BlockTune),
serialized: jest.fn(),
}));
jest.mock('../TextNode', () => ({
TextNode: jest.fn().mockImplementation(() => ({}) as TextNode),
serialized: jest.fn(),
}));
jest.mock('../ValueNode', () => ({
ValueNode: jest.fn().mockImplementation(() => ({}) as ValueNode),
serialized: jest.fn(),
}));
});
afterEach(() => {
jest.clearAllMocks();
});
it('should return a name of a tool that created a BlockNode', () => {
const blockNodeName = createBlockNodeName('paragraph');
const blockNode = new BlockNode({
name: blockNodeName,
data: {},
parent: {} as EditorDocument,
});
const serialized = blockNode.serialized;
expect(serialized.name).toEqual(blockNodeName);
});
it('should call .serialized getter of all tunes associated with the BlockNode', () => {
const blockTunesNames = [
createBlockTuneName('align'),
createBlockTuneName('font-size'),
createBlockTuneName('font-weight'),
];
const blockTunes = blockTunesNames.reduce((acc, name) => ({
...acc,
| [name]: new BlockTune({} as BlockTuneConstructorParameters),
}), {}); |
const spyArray = Object
.values(blockTunes)
.map((blockTune) => {
return jest.spyOn(blockTune as BlockTune, 'serialized', 'get');
});
const blockNode = new BlockNode({
name: createBlockNodeName('paragraph'),
data: {},
parent: {} as EditorDocument,
tunes: blockTunes,
});
blockNode.serialized;
spyArray.forEach((spy) => {
expect(spy).toHaveBeenCalled();
});
});
it('should call .serialized getter of all child ValueNodes associated with the BlockNode', () => {
const countOfValueNodes = 2;
const valueNodes = [ ...Array(countOfValueNodes).keys() ]
.reduce((acc, index) => ({
...acc,
[createDataKey(`data-key-${index}c${index}d`)]: new ValueNode({} as ValueNodeConstructorParameters),
}), {});
const spyArray = Object
.values(valueNodes)
.map((valueNode) => {
return jest.spyOn(valueNode as ValueNode, 'serialized', 'get');
});
const blockNode = new BlockNode({
name: createBlockNodeName('paragraph'),
data: {
...valueNodes,
},
parent: {} as EditorDocument,
});
blockNode.serialized;
spyArray.forEach((spy) => {
expect(spy).toHaveBeenCalled();
});
});
it('should call .serialized getter of all child TextNodes associated with the BlockNode', () => {
const countOfTextNodes = 3;
const textNodes = [ ...Array(countOfTextNodes).keys() ]
.map(() => new TextNode({} as TextNodeConstructorParameters));
const spyArray = textNodes
.map((textNode) => {
return jest.spyOn(textNode, 'serialized', 'get');
});
const blockNode = new BlockNode({
name: createBlockNodeName('paragraph'),
data: {
[createDataKey('data-key-1a2b')]: textNodes,
},
parent: {} as EditorDocument,
});
blockNode.serialized;
spyArray.forEach((spy) => {
expect(spy).toHaveBeenCalled();
});
});
});
});
| src/entities/BlockNode/BlockNode.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/BlockNode/index.ts",
"retrieved_chunk": " .entries(this.#tunes)\n .reduce(\n (acc, [name, tune]) => {\n acc[name] = tune.serialized;\n return acc;\n },\n {} as Record<string, BlockTuneSerialized>\n );\n return {\n name: this.#name,",
"score": 33.90940258446827
},
{
"filename": "src/entities/BlockTune/index.ts",
"retrieved_chunk": "import { BlockTuneConstructorParameters, BlockTuneName, BlockTuneSerialized, createBlockTuneName } from './types';\n/**\n * BlockTune class represents a set of additional information associated with a BlockNode.\n * This information can be used by a BlockTool to modify the behavior of the BlockNode.\n */\nexport class BlockTune {\n /**\n * Private field representing the name of the tune\n */\n #name: BlockTuneName;",
"score": 26.63182881141432
},
{
"filename": "src/entities/BlockTune/BlockTune.spec.ts",
"retrieved_chunk": " });\n describe('.serialized', () => {\n it('should return serialized version of the BlockTune', () => {\n // Arrange\n const tune = createBlockTuneMock({\n name: createBlockTuneName('styling'),\n data: {\n background: 'transparent',\n },\n });",
"score": 26.297763784485323
},
{
"filename": "src/utils/mocks/createBlockTuneMock.ts",
"retrieved_chunk": " data?: Record<string, unknown>,\n}): BlockTune {\n return new BlockTune(\n {\n name: name || createBlockTuneName('aligning'),\n data: data || {},\n }\n );\n}",
"score": 26.00068652553299
},
{
"filename": "src/utils/mocks/createBlockTuneMock.ts",
"retrieved_chunk": "import { BlockTune, BlockTuneName, createBlockTuneName } from '../../entities/BlockTune';\n/**\n * Creates a BlockTune instance with the given name and data.\n *\n * @param args - BlockTune constructor arguments.\n * @param args.name - The name of the tune.\n * @param args.data - Any additional data associated with the tune.\n */\nexport function createBlockTuneMock({ name, data }: {\n name?: BlockTuneName,",
"score": 24.052383666214034
}
] | typescript | [name]: new BlockTune({} as BlockTuneConstructorParameters),
}), {}); |
import {
FormattingNodeConstructorParameters,
InlineToolName,
InlineToolData
} from './types';
import { ChildNode, InlineFragment, InlineNode, InlineNodeSerialized, ParentNode } from '../interfaces';
export * from './types';
/**
* We need to extend FormattingNode interface with ChildNode and ParentNode ones to use the methods from mixins
*/
export interface FormattingNode extends ChildNode, ParentNode {}
/**
* FormattingNode class represents a node in a tree-like structure, used to store and manipulate formatted text content
*/
@ParentNode
@ChildNode
export class FormattingNode implements InlineNode {
/**
* Private field representing the name of the formatting tool applied to the content
*/
#tool: InlineToolName;
/**
* Any additional data associated with the formatting tool
*/
#data?: InlineToolData;
/**
* Constructor for FormattingNode class.
*
* @param args - FormattingNode constructor arguments.
* @param args.tool - The name of the formatting tool applied to the content.
* @param args.data - Any additional data associated with the formatting.
*/
constructor({ tool, data }: FormattingNodeConstructorParameters) {
this.#tool = tool;
this.#data = data;
}
/**
* Returns text value length of current node (including subtree)
*/
public get length(): number {
return this | .children.reduce((sum, child) => sum + child.length, 0); |
}
/**
* Returns serialized value of the node: text and formatting fragments
*/
public get serialized(): InlineNodeSerialized {
return {
text: this.getText(),
fragments: this.getFragments(),
};
}
/**
* Inserts text to the specified index, by default appends text to the end of the current value
*
* @param text - text to insert
* @param [index] - char index where to insert text
*/
public insertText(text: string, index = this.length): void {
const [child, offset] = this.#findChildByIndex(index);
child?.insertText(text, index - offset);
}
/**
* Removes text form the specified range
*
* @param [start] - start char index of the range, by default 0
* @param [end] - end char index of the range, by default length of the text value
* @returns {string} removed text
*/
public removeText(start = 0, end = this.length): string {
const result = this.#reduceChildrenInRange(
start,
end,
(acc, child, childStart, childEnd) => {
return acc + child.removeText(childStart, childEnd);
},
''
);
if (this.length === 0) {
this.remove();
}
return result;
}
/**
* Returns text from the specified range
*
* @param [start] - start char index of the range, by default 0
* @param [end] - end char index of the range, by default length of the text value
*/
public getText(start = 0, end = this.length): string {
return this.#reduceChildrenInRange(
start,
end,
(acc, child, childStart, childEnd) => {
return acc + child.getText(childStart, childEnd);
},
''
);
}
/**
* Returns inline fragments for subtree including current node from the specified range
*
* @param [start] - start char index of the range, by default 0
* @param [end] - end char index of the range, by default length of the text value
*/
public getFragments(start = 0, end = this.length): InlineFragment[] {
return this.#reduceChildrenInRange<InlineFragment[]>(
start,
end,
(acc, child, childStart, childEnd) => {
/**
* If child is not a FormattingNode, it doesn't include any fragments. So we skip it.
*/
if (!(child instanceof FormattingNode)) {
return acc;
}
acc.push(...child.getFragments(childStart, childEnd));
return acc;
},
[ {
tool: this.#tool,
data: this.#data,
range: [start, end],
} ]
);
}
/**
* Splits current node by the specified index
*
* @param index - char index where to split the node
* @returns {FormattingNode | null} new node
*/
public split(index: number): FormattingNode | null {
if (index === 0 || index === this.length) {
return null;
}
const newNode = new FormattingNode({
tool: this.#tool,
data: this.#data,
});
const [child, offset] = this.#findChildByIndex(index);
if (!child) {
return null;
}
// Have to save length as it is changed after split
const childLength = child.length;
const splitNode = child.split(index - offset);
let midNodeIndex = this.children.indexOf(child);
/**
* If node is split or if node is not split but index equals to child length, we should split children from the next node
*/
if (splitNode || (index - offset === childLength)) {
midNodeIndex += 1;
}
newNode.append(...this.children.slice(midNodeIndex));
this.parent?.insertAfter(this, newNode);
return newNode;
}
/**
* Applies formatting to the text with specified inline tool in the specified range
*
* @param tool - name of inline tool to apply
* @param start - char start index of the range
* @param end - char end index of the range
* @param [data] - inline tool data if applicable
*/
public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {
/**
* In case current tool is the same as new one, do nothing
*/
if (tool === this.#tool) {
return [];
}
return this.#reduceChildrenInRange<InlineNode[]>(
start,
end,
(acc, child, childStart, childEnd) => {
acc.push(...child.format(tool, childStart, childEnd, data));
return acc;
},
[]
);
}
/**
* Iterates through children in range and calls callback for each
*
* @param start - range start char index
* @param end - range end char index
* @param callback - callback to apply on children
* @param initialValue - initial accumulator value
* @private
*/
#reduceChildrenInRange<Acc>(
start: number,
end: number,
callback: (acc: Acc, child: InlineNode, start: number, end: number) => Acc,
initialValue: Acc
): Acc {
let result = initialValue;
for (const child of this.children) {
if (start < child.length && end > 0 && start < end) {
result = callback(result, child, Math.max(start, 0), Math.min(child.length, end));
}
start -= child.length;
end -= child.length;
}
return result;
}
/**
* Returns child by passed text index
*
* @param index - char index
* @private
*/
#findChildByIndex(index: number): [child: InlineNode & ChildNode | null, offset: number] {
let totalLength = 0;
for (const child of this.children) {
if (index <= child.length + totalLength) {
return [child, totalLength];
}
totalLength += child.length;
}
return [null, totalLength];
}
}
| src/entities/FormattingNode/index.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/TextNode/index.ts",
"retrieved_chunk": " constructor({ value = '' }: TextNodeConstructorParameters = {}) {\n this.#value = value;\n }\n /**\n * Returns length of the text\n */\n public get length(): number {\n return this.#value.length;\n }\n /**",
"score": 37.00102700416433
},
{
"filename": "src/entities/EditorDocument/index.ts",
"retrieved_chunk": " * @param args.properties - The properties of the document.\n */\n constructor({ children, properties }: EditorDocumentConstructorParameters) {\n this.#children = children;\n this.#properties = properties;\n }\n /**\n * Returns count of child BlockNodes of the EditorDocument.\n */\n public get length(): number {",
"score": 29.76167420623889
},
{
"filename": "src/entities/TextNode/index.ts",
"retrieved_chunk": " public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {\n this.#validateIndex(start);\n this.#validateIndex(end);\n const formattingNode = new FormattingNode({\n tool,\n data,\n });\n const fragments: ChildNode[] = [];\n /**\n * If start index is greater than 0, we need extract part of the text before the start index",
"score": 29.38928778991287
},
{
"filename": "src/entities/interfaces/ParentNode.ts",
"retrieved_chunk": " this.#children = children;\n this.children.forEach(child => child.appendTo(this));\n }\n /**\n * Returns node's children\n */\n public get children(): ChildNode[] {\n return this.#children;\n }\n /**",
"score": 27.602935231395485
},
{
"filename": "src/entities/BlockTune/index.ts",
"retrieved_chunk": " */\n public update(key: string, value: unknown): void {\n this.#data[key] = value;\n }\n /**\n * Returns serialized version of the BlockTune.\n */\n public get serialized(): BlockTuneSerialized {\n return {\n name: this.#name,",
"score": 24.90288097171081
}
] | typescript | .children.reduce((sum, child) => sum + child.length, 0); |
import { BlockNode } from './index';
import { createBlockNodeName, createDataKey } from './types';
import { BlockTune, createBlockTuneName } from '../BlockTune';
import { TextNode } from '../TextNode';
import { ValueNode } from '../ValueNode';
import type { EditorDocument } from '../EditorDocument';
import type { BlockTuneConstructorParameters } from '../BlockTune/types';
import type { TextNodeConstructorParameters } from '../TextNode';
import type { ValueNodeConstructorParameters } from '../ValueNode';
describe('BlockNode', () => {
describe('.serialized', () => {
beforeEach(() => {
jest.mock('../BlockTune', () => ({
BlockTune: jest.fn().mockImplementation(() => ({}) as BlockTune),
serialized: jest.fn(),
}));
jest.mock('../TextNode', () => ({
TextNode: jest.fn().mockImplementation(() => ({}) as TextNode),
serialized: jest.fn(),
}));
jest.mock('../ValueNode', () => ({
ValueNode: jest.fn().mockImplementation(() => ({}) as ValueNode),
serialized: jest.fn(),
}));
});
afterEach(() => {
jest.clearAllMocks();
});
it('should return a name of a tool that created a BlockNode', () => {
const blockNodeName = createBlockNodeName('paragraph');
const blockNode = new BlockNode({
name: blockNodeName,
data: {},
parent: {} as EditorDocument,
});
const serialized = blockNode.serialized;
expect(serialized.name).toEqual(blockNodeName);
});
it('should call .serialized getter of all tunes associated with the BlockNode', () => {
const blockTunesNames = [
createBlockTuneName('align'),
createBlockTuneName('font-size'),
createBlockTuneName('font-weight'),
];
const blockTunes = blockTunesNames.reduce((acc, name) => ({
...acc,
[name]: new BlockTune({} as | BlockTuneConstructorParameters),
}), {}); |
const spyArray = Object
.values(blockTunes)
.map((blockTune) => {
return jest.spyOn(blockTune as BlockTune, 'serialized', 'get');
});
const blockNode = new BlockNode({
name: createBlockNodeName('paragraph'),
data: {},
parent: {} as EditorDocument,
tunes: blockTunes,
});
blockNode.serialized;
spyArray.forEach((spy) => {
expect(spy).toHaveBeenCalled();
});
});
it('should call .serialized getter of all child ValueNodes associated with the BlockNode', () => {
const countOfValueNodes = 2;
const valueNodes = [ ...Array(countOfValueNodes).keys() ]
.reduce((acc, index) => ({
...acc,
[createDataKey(`data-key-${index}c${index}d`)]: new ValueNode({} as ValueNodeConstructorParameters),
}), {});
const spyArray = Object
.values(valueNodes)
.map((valueNode) => {
return jest.spyOn(valueNode as ValueNode, 'serialized', 'get');
});
const blockNode = new BlockNode({
name: createBlockNodeName('paragraph'),
data: {
...valueNodes,
},
parent: {} as EditorDocument,
});
blockNode.serialized;
spyArray.forEach((spy) => {
expect(spy).toHaveBeenCalled();
});
});
it('should call .serialized getter of all child TextNodes associated with the BlockNode', () => {
const countOfTextNodes = 3;
const textNodes = [ ...Array(countOfTextNodes).keys() ]
.map(() => new TextNode({} as TextNodeConstructorParameters));
const spyArray = textNodes
.map((textNode) => {
return jest.spyOn(textNode, 'serialized', 'get');
});
const blockNode = new BlockNode({
name: createBlockNodeName('paragraph'),
data: {
[createDataKey('data-key-1a2b')]: textNodes,
},
parent: {} as EditorDocument,
});
blockNode.serialized;
spyArray.forEach((spy) => {
expect(spy).toHaveBeenCalled();
});
});
});
});
| src/entities/BlockNode/BlockNode.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/BlockNode/index.ts",
"retrieved_chunk": " .entries(this.#tunes)\n .reduce(\n (acc, [name, tune]) => {\n acc[name] = tune.serialized;\n return acc;\n },\n {} as Record<string, BlockTuneSerialized>\n );\n return {\n name: this.#name,",
"score": 27.0820274505007
},
{
"filename": "src/utils/mocks/createBlockTuneMock.ts",
"retrieved_chunk": " data?: Record<string, unknown>,\n}): BlockTune {\n return new BlockTune(\n {\n name: name || createBlockTuneName('aligning'),\n data: data || {},\n }\n );\n}",
"score": 26.00068652553299
},
{
"filename": "src/entities/BlockTune/types/index.ts",
"retrieved_chunk": "export { BlockTuneConstructorParameters } from './BlockTuneConstructorParameters';\nexport { BlockTuneName, createBlockTuneName } from './BlockTuneName';\nexport { BlockTuneSerialized } from './BlockTuneSerialized';",
"score": 20.556843725066088
},
{
"filename": "src/entities/BlockTune/BlockTune.spec.ts",
"retrieved_chunk": " });\n describe('.serialized', () => {\n it('should return serialized version of the BlockTune', () => {\n // Arrange\n const tune = createBlockTuneMock({\n name: createBlockTuneName('styling'),\n data: {\n background: 'transparent',\n },\n });",
"score": 20.249564673934614
},
{
"filename": "src/entities/BlockTune/index.ts",
"retrieved_chunk": "import { BlockTuneConstructorParameters, BlockTuneName, BlockTuneSerialized, createBlockTuneName } from './types';\n/**\n * BlockTune class represents a set of additional information associated with a BlockNode.\n * This information can be used by a BlockTool to modify the behavior of the BlockNode.\n */\nexport class BlockTune {\n /**\n * Private field representing the name of the tune\n */\n #name: BlockTuneName;",
"score": 19.464581712258493
}
] | typescript | BlockTuneConstructorParameters),
}), {}); |
import { describe, it, expect, beforeEach } from '@jest/globals';
import { TextNode } from './index';
import { createInlineToolName, FormattingNode } from '../FormattingNode';
import type { ParentNode } from '../interfaces';
describe('TextNode', () => {
const initialText = 'initial text';
const text = 'some text';
const parentMock = {
insertAfter: jest.fn(),
removeChild: jest.fn(),
append: jest.fn(),
children: [],
} as unknown as ParentNode;
let node: TextNode;
beforeEach(() => {
node = new TextNode({
value: initialText,
parent: parentMock as FormattingNode,
});
});
it('should have empty value by default', () => {
node = new TextNode();
expect(node.getText()).toEqual('');
});
describe('.insertText()', () => {
it('should set text to value if node is empty', () => {
node = new TextNode();
node.insertText(text);
expect(node.getText()).toEqual(text);
});
it('should append text if not empty', () => {
node.insertText(text);
expect(node.getText()).toEqual(initialText + text);
});
it('should prepend text if index is 0 and node is not empty', () => {
node.insertText(text, 0);
expect(node.getText()).toEqual(text + initialText);
});
it('should insert text at index if not empty', () => {
const index = 5;
node.insertText(text, index);
expect(node.getText()).toEqual(initialText.slice(0, index) + text + initialText.slice(index));
});
it('should throw an error if index is less than 0', () => {
const f = (): void => node.insertText(text, -1);
expect(f).toThrowError();
});
it('should throw an error if index is greater than node length', () => {
const f = (): void => node.insertText(text, initialText.length + 1);
expect(f).toThrowError();
});
});
describe('.getText()', () => {
it('should return sliced value if start provided', () => {
const start = 5;
expect(node.getText(start)).toEqual(initialText.slice(start));
});
it('should return sliced value if end provided', () => {
const end = 6;
expect(node.getText(0, end)).toEqual(initialText.slice(0, end));
});
it('should return sliced value if full range provided', () => {
const start = 3;
const end = 9;
expect(node.getText(start, end)).toEqual(initialText.slice(start, end));
});
it('should throw an error if start is invalid index', () => {
expect(() => node.getText(-1)).toThrowError();
expect(() => node.getText(initialText.length + 1)).toThrowError();
});
it('should throw an error if end is invalid index', () => {
expect(() => node.getText(0, initialText.length + 1)).toThrowError();
});
it('should throw an error if end index is greater than start index', () => {
const start = 5;
const end = 3;
expect(() => node.getText(start, end)).toThrowError();
});
it('should not throw an error if end index is equal to start index', () => {
const start = 5;
const end = 5;
expect(() => node.getText(start, end)).not.toThrowError();
});
});
describe('.removeText()', () => {
it('should remove all text by default', () => {
node.removeText();
expect(node.getText()).toEqual('');
});
it('should remove text from specified index', () => {
const start = 3;
node.removeText(start);
expect(node.getText()).toEqual(initialText.slice(0, start));
});
it('should remove text from 0 to specified end index', () => {
const end = 8;
node.removeText(0, end);
expect(node.getText()).toEqual(initialText.slice(end));
});
it('should remove text from specified start and end indecies', () => {
const start = 3;
const end = 8;
node.removeText(start, end);
expect(node.getText()).toEqual(initialText.slice(0, start) + initialText.slice(end));
});
it('should call remove() method if node is empty after removeText() call', () => {
jest.spyOn(node, 'remove');
node.removeText();
expect(node.remove).toBeCalled();
});
});
describe('.format()', () => {
it('should return just one FormattingNode, if formatting full TextNode', () => {
| const name = createInlineToolName('bold'); |
const fragments = node.format(name, 0, initialText.length);
expect(fragments).toHaveLength(1);
expect(fragments[0]).toBeInstanceOf(FormattingNode);
});
it('should return two fragments if formatting from the start, but not to the end', () => {
const name = createInlineToolName('bold');
const end = 5;
const fragments = node.format(name, 0, end);
expect(fragments).toHaveLength(2);
expect(fragments[0]).toBeInstanceOf(FormattingNode);
expect(fragments[1]).toBeInstanceOf(TextNode);
});
it('should return two fragments if formatting to the end, but not from the start', () => {
const name = createInlineToolName('bold');
const start = 5;
const fragments = node.format(name, start, initialText.length);
expect(fragments).toHaveLength(2);
expect(fragments[0]).toBeInstanceOf(TextNode);
expect(fragments[1]).toBeInstanceOf(FormattingNode);
});
it('should return three fragments if formatting in the middle', () => {
const name = createInlineToolName('bold');
const start = 5;
const end = 8;
const fragments = node.format(name, start, end);
// eslint-disable-next-line @typescript-eslint/no-magic-numbers
expect(fragments).toHaveLength(3);
expect(fragments[0]).toBeInstanceOf(TextNode);
expect(fragments[1]).toBeInstanceOf(FormattingNode);
expect(fragments[2]).toBeInstanceOf(TextNode);
});
it('should return FormattingNode with a TextNode as a child with correct text value', () => {
const name = createInlineToolName('bold');
const start = 5;
const end = 8;
const fragments = node.format(name, start, end);
const formattingNode = fragments[1] as FormattingNode;
expect(formattingNode.children[0].getText()).toEqual(initialText.slice(start, end));
});
it('should call parent\'s insertAfter with new nodes', () => {
const name = createInlineToolName('bold');
const start = 5;
const end = 8;
const fragments = node.format(name, start, end);
expect(parentMock.insertAfter).toBeCalledWith(node, ...fragments);
});
});
describe('.split()', () => {
const index = 5;
it('should not split (return null) if index is 0', () => {
const newNode = node.split(0);
expect(newNode).toBeNull();
});
it('should not split (return null) if index equals text length', () => {
const newNode = node.split(initialText.length);
expect(newNode).toBeNull();
});
it('should create new TextNode on split', () => {
const newNode = node.split(index);
expect(newNode).toBeInstanceOf(TextNode);
});
it('should create new TextNode with text value splitted from the original one', () => {
const newNode = node.split(index);
expect(newNode?.getText()).toEqual(initialText.slice(index));
});
it('should remove split text value from the original node', () => {
node.split(index);
expect(node.getText()).toEqual(initialText.slice(0, index));
});
it('should insert new node to the parent', () => {
const newNode = node.split(index);
expect(parentMock.insertAfter).toBeCalledWith(node, newNode);
});
});
describe('.serialized', () => {
it('should return text value and empty array of fragments', () => {
const result = node.serialized;
expect(result).toEqual({
text: initialText,
fragments: [],
});
});
});
});
| src/entities/TextNode/TextNode.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " it('should remove all text by default', () => {\n node.removeText();\n expect(childMock.removeText).toBeCalledWith(0, childMock.length);\n expect(anotherChildMock.removeText).toBeCalledWith(0, anotherChildMock.length);\n });\n it('should call remove() if length is 0 after removeText() call', () => {\n const removeSpy = jest.spyOn(node, 'remove');\n const lengthSpy = jest.spyOn(node, 'length', 'get').mockImplementation(() => 0);\n node.removeText();\n expect(removeSpy).toBeCalled();",
"score": 53.306858564901006
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " expect(anotherChildMock.format).toBeCalledWith(anotherTool, 0, end, undefined);\n });\n it('should do nothing if same tool is being applied', () => {\n node.format(tool, start, end);\n expect(childMock.format).not.toBeCalled();\n expect(anotherChildMock.format).not.toBeCalled();\n });\n it('should return empty array if same tool is being applied', () => {\n const result = node.format(tool, start, end);\n expect(result).toHaveLength(0);",
"score": 28.76852073146425
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " });\n });\n describe('.removeText()', () => {\n const start = 3;\n const end = 5;\n it('should remove text from appropriate child', () => {\n node.removeText(start, end);\n expect(childMock.removeText).toBeCalledWith(start, end);\n });\n it('should adjust indices by child offset', () => {",
"score": 28.374240880910566
},
{
"filename": "src/entities/interfaces/ChildNode.spec.ts",
"retrieved_chunk": " dummy = new Dummy({\n parent: parentMock,\n });\n });\n it('should call parent\\'s removeChild method', () => {\n dummy.remove();\n expect(parentMock.removeChild).toBeCalledWith(dummy);\n });\n it('should set node\\'s parent to null', () => {\n dummy.remove();",
"score": 27.802269769494714
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " const offset = childMock.length;\n node.removeText(offset + start, offset + end);\n expect(anotherChildMock.removeText).toBeCalledWith(start, end);\n });\n it('should call removeText for each affected child', () => {\n const offset = childMock.length;\n node.removeText(start, offset + end);\n expect(childMock.removeText).toBeCalledWith(start, offset);\n expect(anotherChildMock.removeText).toBeCalledWith(0, end);\n });",
"score": 25.709548458314057
}
] | typescript | const name = createInlineToolName('bold'); |
import { BlockNode } from './index';
import { createBlockNodeName, createDataKey } from './types';
import { BlockTune, createBlockTuneName } from '../BlockTune';
import { TextNode } from '../TextNode';
import { ValueNode } from '../ValueNode';
import type { EditorDocument } from '../EditorDocument';
import type { BlockTuneConstructorParameters } from '../BlockTune/types';
import type { TextNodeConstructorParameters } from '../TextNode';
import type { ValueNodeConstructorParameters } from '../ValueNode';
describe('BlockNode', () => {
describe('.serialized', () => {
beforeEach(() => {
jest.mock('../BlockTune', () => ({
BlockTune: jest.fn().mockImplementation(() => ({}) as BlockTune),
serialized: jest.fn(),
}));
jest.mock('../TextNode', () => ({
TextNode: jest.fn().mockImplementation(() => ({}) as TextNode),
serialized: jest.fn(),
}));
jest.mock('../ValueNode', () => ({
ValueNode: jest.fn().mockImplementation(() => ({}) as ValueNode),
serialized: jest.fn(),
}));
});
afterEach(() => {
jest.clearAllMocks();
});
it('should return a name of a tool that created a BlockNode', () => {
const blockNodeName = createBlockNodeName('paragraph');
const blockNode = new BlockNode({
name: blockNodeName,
data: {},
parent: {} as EditorDocument,
});
const serialized = blockNode.serialized;
expect(serialized.name).toEqual(blockNodeName);
});
it('should call .serialized getter of all tunes associated with the BlockNode', () => {
const blockTunesNames = [
createBlockTuneName('align'),
createBlockTuneName('font-size'),
createBlockTuneName('font-weight'),
];
const blockTunes = blockTunesNames.reduce((acc, name) => ({
...acc,
[name]: new BlockTune({} as BlockTuneConstructorParameters),
}), {});
const spyArray = Object
.values(blockTunes)
.map((blockTune) => {
return jest.spyOn(blockTune as BlockTune, 'serialized', 'get');
});
const blockNode = new BlockNode({
name: createBlockNodeName('paragraph'),
data: {},
parent: {} as EditorDocument,
tunes: blockTunes,
});
blockNode.serialized;
spyArray.forEach((spy) => {
expect(spy).toHaveBeenCalled();
});
});
it('should call .serialized getter of all child ValueNodes associated with the BlockNode', () => {
const countOfValueNodes = 2;
const valueNodes = [ ...Array(countOfValueNodes).keys() ]
.reduce((acc, index) => ({
...acc,
| [createDataKey(`data-key-${index}c${index}d`)]: new ValueNode({} as ValueNodeConstructorParameters),
}), {}); |
const spyArray = Object
.values(valueNodes)
.map((valueNode) => {
return jest.spyOn(valueNode as ValueNode, 'serialized', 'get');
});
const blockNode = new BlockNode({
name: createBlockNodeName('paragraph'),
data: {
...valueNodes,
},
parent: {} as EditorDocument,
});
blockNode.serialized;
spyArray.forEach((spy) => {
expect(spy).toHaveBeenCalled();
});
});
it('should call .serialized getter of all child TextNodes associated with the BlockNode', () => {
const countOfTextNodes = 3;
const textNodes = [ ...Array(countOfTextNodes).keys() ]
.map(() => new TextNode({} as TextNodeConstructorParameters));
const spyArray = textNodes
.map((textNode) => {
return jest.spyOn(textNode, 'serialized', 'get');
});
const blockNode = new BlockNode({
name: createBlockNodeName('paragraph'),
data: {
[createDataKey('data-key-1a2b')]: textNodes,
},
parent: {} as EditorDocument,
});
blockNode.serialized;
spyArray.forEach((spy) => {
expect(spy).toHaveBeenCalled();
});
});
});
});
| src/entities/BlockNode/BlockNode.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/BlockNode/index.ts",
"retrieved_chunk": " if (value instanceof Array) {\n acc[dataKey] = value.map((node) => node.serialized);\n return acc;\n }\n acc[dataKey] = value.serialized;\n return acc;\n },\n {} as Record<string, unknown>\n );\n const serializedTunes = Object",
"score": 24.630792472648107
},
{
"filename": "src/entities/BlockNode/index.ts",
"retrieved_chunk": " .entries(this.#tunes)\n .reduce(\n (acc, [name, tune]) => {\n acc[name] = tune.serialized;\n return acc;\n },\n {} as Record<string, BlockTuneSerialized>\n );\n return {\n name: this.#name,",
"score": 22.572997485534355
},
{
"filename": "src/entities/ValueNode/index.ts",
"retrieved_chunk": "import type { ValueNodeConstructorParameters } from './types';\n/**\n * ValueNode class represents a node in a tree-like structure, used to store and manipulate data associated with a BlockNode.\n * Unlike TextNode, changing the data of a ValueNode will replace the entire data in this node.\n * This can be useful for storing data that needs to be updated in its entirety, such as a link or other metadata associated with a BlockNode.\n */\nexport class ValueNode<ValueType = unknown> {\n /**\n * Private field representing the data associated with this node\n */",
"score": 17.794458051455653
},
{
"filename": "src/entities/FormattingNode/index.ts",
"retrieved_chunk": " return [];\n }\n return this.#reduceChildrenInRange<InlineNode[]>(\n start,\n end,\n (acc, child, childStart, childEnd) => {\n acc.push(...child.format(tool, childStart, childEnd, data));\n return acc;\n },\n []",
"score": 15.690400029421978
},
{
"filename": "src/entities/BlockNode/index.ts",
"retrieved_chunk": " public get serialized(): BlockNodeSerialized {\n const serializedData = Object\n .entries(this.#data)\n .reduce(\n (acc, [dataKey, value]) => {\n /**\n * If the value is an array, we need to serialize each node in the array\n * Value is an array if the BlockNode contains TextNodes and FormattingNodes\n * After serializing there will be InlineNodeSerialized object\n */",
"score": 14.424830895467284
}
] | typescript | [createDataKey(`data-key-${index}c${index}d`)]: new ValueNode({} as ValueNodeConstructorParameters),
}), {}); |
import { EditorDocument } from './index';
import { BlockNode } from '../BlockNode';
import { createBlockNodeMock } from '../../utils/mocks/createBlockNodeMock';
import { createEditorDocumentMock } from '../../utils/mocks/createEditorDocumentMock';
/**
* Creates an EditorDocument object with some blocks for tests.
*/
function createEditorDocumentMockWithSomeBlocks(): EditorDocument {
const document = createEditorDocumentMock();
const countOfBlocks = 3;
for (let i = 0; i < countOfBlocks; i++) {
const block = createBlockNodeMock({
parent: document,
});
document.addBlock(block);
}
return document;
}
describe('EditorDocument', () => {
describe('.length', () => {
it('should return the number of blocks in the document', () => {
// Arrange
const blocksCount = 3;
const document = new EditorDocument({
children: [],
properties: {
readOnly: false,
},
});
for (let i = 0; i < blocksCount; i++) {
const block = createBlockNodeMock({
parent: document,
});
document.addBlock(block);
}
// Act
const actual = document.length;
// Assert
expect(actual).toBe(blocksCount);
});
});
describe('.addBlock()', () => {
it('should add the block to the end of the document if index is not provided', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
const block = createBlockNodeMock({
parent: document,
});
// Act
document.addBlock(block);
// Assert
const lastBlock = document.getBlock(document.length - 1);
expect(lastBlock).toBe(block);
});
it('should add the block to the beginning of the document if index is 0', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
const block = createBlockNodeMock({
parent: document,
});
// Act
document.addBlock(block, 0);
// Assert
expect(document.getBlock(0)).toBe(block);
});
it('should add the block to the specified index in the middle of the document', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
const block = createBlockNodeMock({
parent: document,
});
// Act
document.addBlock(block, 1);
// Assert
expect(document.getBlock(1)).toBe(block);
});
it('should add the block to the end of the document if the index after the last element is passed', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
const block = createBlockNodeMock({
parent: document,
});
// Act
document.addBlock(block, document.length);
// Assert
const lastBlock = document.getBlock(document.length - 1);
expect(lastBlock).toBe(block);
});
it('should throw an error if index is less then 0', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
const block = createBlockNodeMock({
parent: document,
});
// Act
const action = (): void => document.addBlock(block, -1);
// Assert
expect(action).toThrowError('Index out of bounds');
});
it('should throw an error if index is greater then document length', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
const block = createBlockNodeMock({
parent: document,
});
// Act
const action = (): void => document.addBlock(block, document.length + 1);
// Assert
expect(action).toThrowError('Index out of bounds');
});
});
describe('.removeBlock()', () => {
it('should remove the block from the beginning of the document if index 0 is passed', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
const block = document.getBlock(0);
// Act
document.removeBlock(0);
// Assert
expect(document.getBlock(0)).not.toBe(block);
});
it('should remove the block from the specified index in the middle of the document', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
const block = document.getBlock(1);
// Act
document.removeBlock(1);
// Assert
expect(document.getBlock(1)).not.toBe(block);
});
it('should remove the block from the end of the document if the last index is passed', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
const documentLengthBeforeRemove = document.length;
// Act
document.removeBlock(document.length - 1);
// Assert
expect(document.length).toBe(documentLengthBeforeRemove - 1);
});
it('should throw an error if index is less then 0', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
// Act
const action = (): void => document.removeBlock(-1);
// Assert
expect(action).toThrowError('Index out of bounds');
});
it('should throw an error if index is greater then document length', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
// Act
const action = (): void => document.removeBlock(document.length);
// Assert
expect(action).toThrowError('Index out of bounds');
});
});
describe('.getBlock()', () => {
it('should return the block from the specific index', () => {
// Arrange
const document = createEditorDocumentMock();
const countOfBlocks = 3;
| const blocks: BlockNode[] = []; |
for (let i = 0; i < countOfBlocks; i++) {
const block = createBlockNodeMock({
parent: document,
});
document.addBlock(block);
blocks.push(block);
}
const index = 1;
// Act
const block = document.getBlock(index);
// Assert
expect(block).toBe(blocks[index]);
});
it('should throw an error if index is less then 0', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
// Act
const action = (): BlockNode => document.getBlock(-1);
// Assert
expect(action).toThrowError('Index out of bounds');
});
it('should throw an error if index is greater then document length', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
// Act
const action = (): BlockNode => document.getBlock(document.length);
// Assert
expect(action).toThrowError('Index out of bounds');
});
});
});
| src/entities/EditorDocument/EditorDocument.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/EditorDocument/index.ts",
"retrieved_chunk": " * Throws an error if the index is out of bounds.\n *\n * @param index - The index of the BlockNode to return\n * @throws Error if the index is out of bounds\n */\n public getBlock(index: number): BlockNode {\n this.#checkIndexOutOfBounds(index, this.length - 1);\n return this.#children[index];\n }\n /**",
"score": 21.52899249863127
},
{
"filename": "src/entities/EditorDocument/index.ts",
"retrieved_chunk": " * Checks if the index is out of bounds.\n *\n * @param index - The index to check\n * @param max - The maximum index value. Defaults to the length of the children array.\n * @throws Error if the index is out of bounds\n */\n #checkIndexOutOfBounds(index: number, max: number = this.length): void {\n if (index < 0 || index > max) {\n throw new Error('Index out of bounds');\n }",
"score": 17.566129020900895
},
{
"filename": "src/entities/ValueNode/ValueNode.spec.ts",
"retrieved_chunk": " longitudeValueNode.update(updatedLongitude);\n // Assert\n expect(longitudeValueNode.serialized).toBe(updatedLongitude);\n });\n });\n describe('.serialized', () => {\n it('should return the serialized data associated with this value node', () => {\n // Arrange\n const longitude = 23.123;\n const longitudeValueNode = createValueNodeMock({",
"score": 14.610811186361293
},
{
"filename": "src/entities/TextNode/TextNode.spec.ts",
"retrieved_chunk": " });\n it('should return sliced value if full range provided', () => {\n const start = 3;\n const end = 9;\n expect(node.getText(start, end)).toEqual(initialText.slice(start, end));\n });\n it('should throw an error if start is invalid index', () => {\n expect(() => node.getText(-1)).toThrowError();\n expect(() => node.getText(initialText.length + 1)).toThrowError();\n });",
"score": 13.518283500332048
},
{
"filename": "src/entities/EditorDocument/index.ts",
"retrieved_chunk": " *\n * @param index - The index of the BlockNode to remove\n * @throws Error if the index is out of bounds\n */\n public removeBlock(index: number): void {\n this.#checkIndexOutOfBounds(index, this.length - 1);\n this.#children.splice(index, 1);\n }\n /**\n * Returns the BlockNode at the specified index.",
"score": 12.939230034641524
}
] | typescript | const blocks: BlockNode[] = []; |
import { EditorDocument } from '../EditorDocument';
import { BlockTune, BlockTuneName, BlockTuneSerialized } from '../BlockTune';
import {
BlockNodeConstructorParameters,
BlockNodeName,
createBlockNodeName,
DataKey,
createDataKey, BlockNodeData,
BlockNodeSerialized
} from './types';
/**
* BlockNode class represents a node in a tree-like structure used to store and manipulate Blocks in an editor document.
* A BlockNode can contain one or more child nodes of type TextNode, ValueNode or FormattingNode.
* It can also be associated with one or more BlockTunes, which can modify the behavior of the BlockNode.
*/
export class BlockNode {
/**
* Field representing a name of the Tool created this Block
*/
#name: BlockNodeName;
/**
* Field representing the content of the Block
*/
#data: BlockNodeData;
/**
* Field representing the parent EditorDocument of the BlockNode
*/
#parent: EditorDocument;
/**
* Private field representing the BlockTunes associated with the BlockNode
*/
#tunes: Record<BlockTuneName, BlockTune>;
/**
* Constructor for BlockNode class.
*
* @param args - TextNode constructor arguments.
* @param args.name - The name of the BlockNode.
* @param args.data - The content of the BlockNode.
* @param args.parent - The parent EditorDocument of the BlockNode.
* @param args.tunes - The BlockTunes associated with the BlockNode.
*/
constructor({ name, data, parent, tunes = {} }: BlockNodeConstructorParameters) {
this.#name = name;
this.#data = data;
this.#parent = parent;
this.#tunes = tunes;
}
/**
* Returns serialized object representing the BlockNode
*/
public get serialized(): BlockNodeSerialized {
const serializedData = Object
.entries(this.#data)
.reduce(
(acc, [dataKey, value]) => {
/**
* If the value is an array, we need to serialize each node in the array
* Value is an array if the BlockNode contains TextNodes and FormattingNodes
* After serializing there will be InlineNodeSerialized object
*/
if (value instanceof Array) {
acc[dataKey] = value.map((node) => node.serialized);
return acc;
}
acc[dataKey] = value.serialized;
return acc;
},
{} as Record<string, unknown>
);
const serializedTunes = Object
.entries(this.#tunes)
.reduce(
(acc, [name, tune]) => {
acc[name] = tune.serialized;
return acc;
},
{} as Record | <string, BlockTuneSerialized>
); |
return {
name: this.#name,
data: serializedData,
tunes: serializedTunes,
};
}
}
export {
BlockNodeName,
createBlockNodeName,
DataKey,
createDataKey
};
| src/entities/BlockNode/index.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/BlockNode/BlockNode.spec.ts",
"retrieved_chunk": " createBlockTuneName('align'),\n createBlockTuneName('font-size'),\n createBlockTuneName('font-weight'),\n ];\n const blockTunes = blockTunesNames.reduce((acc, name) => ({\n ...acc,\n [name]: new BlockTune({} as BlockTuneConstructorParameters),\n }), {});\n const spyArray = Object\n .values(blockTunes)",
"score": 33.30014296881142
},
{
"filename": "src/entities/BlockTune/types/BlockTuneSerialized.ts",
"retrieved_chunk": "/**\n * BlockTuneSerialized represents a serialized version of a BlockTune.\n */\nexport interface BlockTuneSerialized {\n /**\n * The name of the tune.\n * Serialized as a string.\n */\n name: string;\n /**",
"score": 26.76837894597172
},
{
"filename": "src/entities/FormattingNode/index.ts",
"retrieved_chunk": " return [];\n }\n return this.#reduceChildrenInRange<InlineNode[]>(\n start,\n end,\n (acc, child, childStart, childEnd) => {\n acc.push(...child.format(tool, childStart, childEnd, data));\n return acc;\n },\n []",
"score": 21.175772522381926
},
{
"filename": "src/entities/BlockTune/types/BlockTuneConstructorParameters.ts",
"retrieved_chunk": "import { BlockTuneName } from './BlockTuneName';\nexport interface BlockTuneConstructorParameters {\n /**\n * The name of the tune\n */\n name: BlockTuneName;\n /**\n * Any additional data associated with the tune\n */\n data: Record<string, unknown>;",
"score": 20.703069473990997
},
{
"filename": "src/entities/BlockNode/BlockNode.spec.ts",
"retrieved_chunk": " spyArray.forEach((spy) => {\n expect(spy).toHaveBeenCalled();\n });\n });\n it('should call .serialized getter of all child ValueNodes associated with the BlockNode', () => {\n const countOfValueNodes = 2;\n const valueNodes = [ ...Array(countOfValueNodes).keys() ]\n .reduce((acc, index) => ({\n ...acc,\n [createDataKey(`data-key-${index}c${index}d`)]: new ValueNode({} as ValueNodeConstructorParameters),",
"score": 20.64791867222776
}
] | typescript | <string, BlockTuneSerialized>
); |
import { BlockNode } from './index';
import { createBlockNodeName, createDataKey } from './types';
import { BlockTune, createBlockTuneName } from '../BlockTune';
import { TextNode } from '../TextNode';
import { ValueNode } from '../ValueNode';
import type { EditorDocument } from '../EditorDocument';
import type { BlockTuneConstructorParameters } from '../BlockTune/types';
import type { TextNodeConstructorParameters } from '../TextNode';
import type { ValueNodeConstructorParameters } from '../ValueNode';
describe('BlockNode', () => {
  describe('.serialized', () => {
    beforeEach(() => {
      /**
       * NOTE(review): jest.mock() is normally hoisted to the top of the module
       * by babel-jest; calling it inside beforeEach — after the real modules
       * were already imported above — likely has no effect on the constructors
       * used below. The tests actually rely on jest.spyOn over real instances;
       * confirm whether these mock registrations are needed at all.
       */
      jest.mock('../BlockTune', () => ({
        BlockTune: jest.fn().mockImplementation(() => ({}) as BlockTune),
        serialized: jest.fn(),
      }));
      jest.mock('../TextNode', () => ({
        TextNode: jest.fn().mockImplementation(() => ({}) as TextNode),
        serialized: jest.fn(),
      }));
      jest.mock('../ValueNode', () => ({
        ValueNode: jest.fn().mockImplementation(() => ({}) as ValueNode),
        serialized: jest.fn(),
      }));
    });

    afterEach(() => {
      // Reset call history of all spies/mocks between tests
      jest.clearAllMocks();
    });

    it('should return a name of a tool that created a BlockNode', () => {
      const blockNodeName = createBlockNodeName('paragraph');
      const blockNode = new BlockNode({
        name: blockNodeName,
        data: {},
        parent: {} as EditorDocument,
      });

      const serialized = blockNode.serialized;

      expect(serialized.name).toEqual(blockNodeName);
    });

    it('should call .serialized getter of all tunes associated with the BlockNode', () => {
      const blockTunesNames = [
        createBlockTuneName('align'),
        createBlockTuneName('font-size'),
        createBlockTuneName('font-weight'),
      ];
      // Build a tunes map keyed by tune name; tune internals are irrelevant here
      const blockTunes = blockTunesNames.reduce((acc, name) => ({
        ...acc,
        [name]: new BlockTune({} as BlockTuneConstructorParameters),
      }), {});
      // Spy on each tune's `serialized` getter to verify it is consulted
      const spyArray = Object
        .values(blockTunes)
        .map((blockTune) => {
          return jest.spyOn(blockTune as BlockTune, 'serialized', 'get');
        });
      const blockNode = new BlockNode({
        name: createBlockNodeName('paragraph'),
        data: {},
        parent: {} as EditorDocument,
        tunes: blockTunes,
      });

      // Trigger serialization; the return value itself is not asserted here
      blockNode.serialized;

      spyArray.forEach((spy) => {
        expect(spy).toHaveBeenCalled();
      });
    });

    it('should call .serialized getter of all child ValueNodes associated with the BlockNode', () => {
      const countOfValueNodes = 2;
      // Build a data map with one ValueNode per generated data key
      const valueNodes = [ ...Array(countOfValueNodes).keys() ]
        .reduce((acc, index) => ({
          ...acc,
          [createDataKey(`data-key-${index}c${index}d`)]: new ValueNode({} as ValueNodeConstructorParameters),
        }), {});
      const spyArray = Object
        .values(valueNodes)
        .map((valueNode) => {
          return jest.spyOn(valueNode as ValueNode, 'serialized', 'get');
        });
      const blockNode = new BlockNode({
        name: createBlockNodeName('paragraph'),
        data: {
          ...valueNodes,
        },
        parent: {} as EditorDocument,
      });

      blockNode.serialized;

      spyArray.forEach((spy) => {
        expect(spy).toHaveBeenCalled();
      });
    });

    it('should call .serialized getter of all child TextNodes associated with the BlockNode', () => {
      const countOfTextNodes = 3;
      // TextNodes are stored as an array under a single data key
      const textNodes = [ ...Array(countOfTextNodes).keys() ]
        .map(() => new TextNode({} as TextNodeConstructorParameters));
      const spyArray = textNodes
        .map((textNode) => {
          return jest.spyOn(textNode, 'serialized', 'get');
        });
      const blockNode = new BlockNode({
        name: createBlockNodeName('paragraph'),
        data: {
          [createDataKey('data-key-1a2b')]: textNodes,
        },
        parent: {} as EditorDocument,
      });

      blockNode.serialized;

      spyArray.forEach((spy) => {
        expect(spy).toHaveBeenCalled();
      });
    });
  });
});
| src/entities/BlockNode/BlockNode.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/BlockNode/index.ts",
"retrieved_chunk": " if (value instanceof Array) {\n acc[dataKey] = value.map((node) => node.serialized);\n return acc;\n }\n acc[dataKey] = value.serialized;\n return acc;\n },\n {} as Record<string, unknown>\n );\n const serializedTunes = Object",
"score": 21.643261644313686
},
{
"filename": "src/entities/ValueNode/ValueNode.spec.ts",
"retrieved_chunk": " longitudeValueNode.update(updatedLongitude);\n // Assert\n expect(longitudeValueNode.serialized).toBe(updatedLongitude);\n });\n });\n describe('.serialized', () => {\n it('should return the serialized data associated with this value node', () => {\n // Arrange\n const longitude = 23.123;\n const longitudeValueNode = createValueNodeMock({",
"score": 14.974237391098567
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " describe('.serialized', () => {\n it('should return concatenated text of all fragments with fragments list describing formatting', () => {\n const result = node.serialized;\n expect(result).toEqual({\n text: childMock.getText() + anotherChildMock.getText(),\n fragments: [\n {\n tool,\n data,\n range: [0, node.length],",
"score": 14.499459894455152
},
{
"filename": "src/entities/TextNode/TextNode.spec.ts",
"retrieved_chunk": " expect(fragments).toHaveLength(3);\n expect(fragments[0]).toBeInstanceOf(TextNode);\n expect(fragments[1]).toBeInstanceOf(FormattingNode);\n expect(fragments[2]).toBeInstanceOf(TextNode);\n });\n it('should return FormattingNode with a TextNode as a child with correct text value', () => {\n const name = createInlineToolName('bold');\n const start = 5;\n const end = 8;\n const fragments = node.format(name, start, end);",
"score": 14.016795938416765
},
{
"filename": "src/entities/BlockNode/types/BlockNodeSerialized.ts",
"retrieved_chunk": " * The content of the Block\n */\n data: Record<string, unknown>; // @todo replace unknown type with serialized root node and value node\n /**\n * Serialized BlockTunes associated with the BlockNode\n */\n tunes: Record<string, BlockTuneSerialized>;\n}",
"score": 12.81043056071841
}
] | typescript | {} as TextNodeConstructorParameters)); |
import { BlockNode } from './index';
import { createBlockNodeName, createDataKey } from './types';
import { BlockTune, createBlockTuneName } from '../BlockTune';
import { TextNode } from '../TextNode';
import { ValueNode } from '../ValueNode';
import type { EditorDocument } from '../EditorDocument';
import type { BlockTuneConstructorParameters } from '../BlockTune/types';
import type { TextNodeConstructorParameters } from '../TextNode';
import type { ValueNodeConstructorParameters } from '../ValueNode';
describe('BlockNode', () => {
  describe('.serialized', () => {
    beforeEach(() => {
      /**
       * NOTE(review): jest.mock() is normally hoisted to the top of the module
       * by babel-jest; calling it inside beforeEach — after the real modules
       * were already imported above — likely has no effect on the constructors
       * used below. The tests actually rely on jest.spyOn over real instances;
       * confirm whether these mock registrations are needed at all.
       */
      jest.mock('../BlockTune', () => ({
        BlockTune: jest.fn().mockImplementation(() => ({}) as BlockTune),
        serialized: jest.fn(),
      }));
      jest.mock('../TextNode', () => ({
        TextNode: jest.fn().mockImplementation(() => ({}) as TextNode),
        serialized: jest.fn(),
      }));
      jest.mock('../ValueNode', () => ({
        ValueNode: jest.fn().mockImplementation(() => ({}) as ValueNode),
        serialized: jest.fn(),
      }));
    });

    afterEach(() => {
      // Reset call history of all spies/mocks between tests
      jest.clearAllMocks();
    });

    it('should return a name of a tool that created a BlockNode', () => {
      const blockNodeName = createBlockNodeName('paragraph');
      const blockNode = new BlockNode({
        name: blockNodeName,
        data: {},
        parent: {} as EditorDocument,
      });

      const serialized = blockNode.serialized;

      expect(serialized.name).toEqual(blockNodeName);
    });

    it('should call .serialized getter of all tunes associated with the BlockNode', () => {
      const blockTunesNames = [
        createBlockTuneName('align'),
        createBlockTuneName('font-size'),
        createBlockTuneName('font-weight'),
      ];
      // Build a tunes map keyed by tune name; tune internals are irrelevant here
      const blockTunes = blockTunesNames.reduce((acc, name) => ({
        ...acc,
        [name]: new BlockTune({} as BlockTuneConstructorParameters),
      }), {});
      // Spy on each tune's `serialized` getter to verify it is consulted
      const spyArray = Object
        .values(blockTunes)
        .map((blockTune) => {
          return jest.spyOn(blockTune as BlockTune, 'serialized', 'get');
        });
      const blockNode = new BlockNode({
        name: createBlockNodeName('paragraph'),
        data: {},
        parent: {} as EditorDocument,
        tunes: blockTunes,
      });

      // Trigger serialization; the return value itself is not asserted here
      blockNode.serialized;

      spyArray.forEach((spy) => {
        expect(spy).toHaveBeenCalled();
      });
    });

    it('should call .serialized getter of all child ValueNodes associated with the BlockNode', () => {
      const countOfValueNodes = 2;
      // Build a data map with one ValueNode per generated data key
      const valueNodes = [ ...Array(countOfValueNodes).keys() ]
        .reduce((acc, index) => ({
          ...acc,
          [createDataKey(`data-key-${index}c${index}d`)]: new ValueNode({} as ValueNodeConstructorParameters),
        }), {});
      const spyArray = Object
        .values(valueNodes)
        .map((valueNode) => {
          return jest.spyOn(valueNode as ValueNode, 'serialized', 'get');
        });
      const blockNode = new BlockNode({
        name: createBlockNodeName('paragraph'),
        data: {
          ...valueNodes,
        },
        parent: {} as EditorDocument,
      });

      blockNode.serialized;

      spyArray.forEach((spy) => {
        expect(spy).toHaveBeenCalled();
      });
    });

    it('should call .serialized getter of all child TextNodes associated with the BlockNode', () => {
      const countOfTextNodes = 3;
      // TextNodes are stored as an array under a single data key
      const textNodes = [ ...Array(countOfTextNodes).keys() ]
        .map(() => new TextNode({} as TextNodeConstructorParameters));
      const spyArray = textNodes
        .map((textNode) => {
          return jest.spyOn(textNode, 'serialized', 'get');
        });
      const blockNode = new BlockNode({
        name: createBlockNodeName('paragraph'),
        data: {
          [createDataKey('data-key-1a2b')]: textNodes,
        },
        parent: {} as EditorDocument,
      });

      blockNode.serialized;

      spyArray.forEach((spy) => {
        expect(spy).toHaveBeenCalled();
      });
    });
  });
});
| src/entities/BlockNode/BlockNode.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/utils/mocks/createBlockNodeMock.ts",
"retrieved_chunk": " */\nexport function createBlockNodeMock({ name, parent, tunes, data }: { name?: BlockNodeName, parent: EditorDocument, data?: BlockNodeData, tunes?: Record<BlockTuneName, BlockTune> }): BlockNode {\n return new BlockNode({\n name: name || createBlockNodeName('header'),\n parent,\n data: data || {},\n tunes: tunes || {},\n });\n}",
"score": 29.40860158690665
},
{
"filename": "src/entities/BlockTune/BlockTune.spec.ts",
"retrieved_chunk": " blockTune.update('align', 'left');\n // Assert\n expect(blockTune.serialized.data).toEqual({\n align: 'left',\n });\n });\n it('should update field in data object by key', () => {\n // Arrange\n const blockTune = createBlockTuneMock({\n data: {",
"score": 20.213474085742366
},
{
"filename": "src/entities/BlockNode/index.ts",
"retrieved_chunk": " */\n constructor({ name, data, parent, tunes = {} }: BlockNodeConstructorParameters) {\n this.#name = name;\n this.#data = data;\n this.#parent = parent;\n this.#tunes = tunes;\n }\n /**\n * Returns serialized object representing the BlockNode\n */",
"score": 19.682300992825915
},
{
"filename": "src/entities/BlockTune/BlockTune.spec.ts",
"retrieved_chunk": " align: 'center',\n },\n });\n // Act\n blockTune.update('align', 'right');\n // Assert\n expect(blockTune.serialized.data).toEqual({\n align: 'right',\n });\n });",
"score": 19.582491583958493
},
{
"filename": "src/entities/BlockNode/index.ts",
"retrieved_chunk": " .entries(this.#tunes)\n .reduce(\n (acc, [name, tune]) => {\n acc[name] = tune.serialized;\n return acc;\n },\n {} as Record<string, BlockTuneSerialized>\n );\n return {\n name: this.#name,",
"score": 18.37973747297198
}
] | typescript | name: createBlockNodeName('paragraph'),
data: {},
parent: {} as EditorDocument,
tunes: blockTunes,
}); |
import { BlockTuneConstructorParameters, BlockTuneName, BlockTuneSerialized, createBlockTuneName } from './types';
/**
* BlockTune class represents a set of additional information associated with a BlockNode.
* This information can be used by a BlockTool to modify the behavior of the BlockNode.
*/
export class BlockTune {
/**
* Private field representing the name of the tune
*/
#name: BlockTuneName;
/**
* Private field representing any additional data associated with the tune
*/
#data: Record<string, unknown>;
/**
* Constructor for BlockTune class.
*
* @param args - BlockTune constructor arguments.
* @param args.name - The name of the tune.
* @param args.data - Any additional data associated with the tune.
*/
constructor({ name, data }: BlockTuneConstructorParameters) {
this.#name = name;
this.#data = data;
}
/**
* Updates data associated with the tune.
*
* @param key - The key of the data to update
* @param value - The value to update the data with
*/
public update(key: string, value: unknown): void {
this.#data[key] = value;
}
/**
* Returns serialized version of the BlockTune.
*/
public get serialized() | : BlockTuneSerialized { |
return {
name: this.#name,
data: this.#data,
};
}
}
export {
BlockTuneName,
createBlockTuneName
};
export type {
BlockTuneSerialized
};
| src/entities/BlockTune/index.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/ValueNode/index.ts",
"retrieved_chunk": " /**\n * Updates the data associated with this value node.\n *\n * @param value - The new value of this value node.\n */\n public update(value: ValueType): void {\n this.#value = value;\n }\n /**\n * Returns serialized data associated with this value node.",
"score": 45.659705763293886
},
{
"filename": "src/entities/BlockNode/types/BlockNodeSerialized.ts",
"retrieved_chunk": " * The content of the Block\n */\n data: Record<string, unknown>; // @todo replace unknown type with serialized root node and value node\n /**\n * Serialized BlockTunes associated with the BlockNode\n */\n tunes: Record<string, BlockTuneSerialized>;\n}",
"score": 29.63997595318409
},
{
"filename": "src/entities/BlockTune/BlockTune.spec.ts",
"retrieved_chunk": " blockTune.update('align', 'left');\n // Assert\n expect(blockTune.serialized.data).toEqual({\n align: 'left',\n });\n });\n it('should update field in data object by key', () => {\n // Arrange\n const blockTune = createBlockTuneMock({\n data: {",
"score": 28.533878747776203
},
{
"filename": "src/entities/TextNode/index.ts",
"retrieved_chunk": " * Returns serialized value of the node\n */\n public get serialized(): InlineNodeSerialized {\n return {\n text: this.getText(),\n // No fragments for text node\n fragments: [],\n };\n }\n /**",
"score": 27.573093788894017
},
{
"filename": "src/entities/ValueNode/ValueNode.spec.ts",
"retrieved_chunk": " longitudeValueNode.update(updatedLongitude);\n // Assert\n expect(longitudeValueNode.serialized).toBe(updatedLongitude);\n });\n });\n describe('.serialized', () => {\n it('should return the serialized data associated with this value node', () => {\n // Arrange\n const longitude = 23.123;\n const longitudeValueNode = createValueNodeMock({",
"score": 26.79332866657188
}
] | typescript | : BlockTuneSerialized { |
import {
FormattingNodeConstructorParameters,
InlineToolName,
InlineToolData
} from './types';
import { ChildNode, InlineFragment, InlineNode, InlineNodeSerialized, ParentNode } from '../interfaces';
export * from './types';
/**
* We need to extend FormattingNode interface with ChildNode and ParentNode ones to use the methods from mixins
*/
export interface FormattingNode extends ChildNode, ParentNode {}

/**
 * FormattingNode class represents a node in a tree-like structure, used to store and manipulate formatted text content
 */
@ParentNode
@ChildNode
export class FormattingNode implements InlineNode {
  /**
   * Private field representing the name of the formatting tool applied to the content
   */
  #tool: InlineToolName;

  /**
   * Any additional data associated with the formatting tool
   */
  #data?: InlineToolData;

  /**
   * Constructor for FormattingNode class.
   *
   * @param args - FormattingNode constructor arguments.
   * @param args.tool - The name of the formatting tool applied to the content.
   * @param args.data - Any additional data associated with the formatting.
   */
  constructor({ tool, data }: FormattingNodeConstructorParameters) {
    this.#tool = tool;
    this.#data = data;
  }

  /**
   * Returns text value length of current node (including subtree)
   */
  public get length(): number {
    return this.children.reduce((sum, child) => sum + child.length, 0);
  }

  /**
   * Returns serialized value of the node: text and formatting fragments
   */
  public get serialized(): InlineNodeSerialized {
    return {
      text: this.getText(),
      fragments: this.getFragments(),
    };
  }

  /**
   * Inserts text to the specified index, by default appends text to the end of the current value
   *
   * @param text - text to insert
   * @param [index] - char index where to insert text
   */
  public insertText(text: string, index = this.length): void {
    // offset is the number of characters before the found child, so the
    // insertion index is translated into that child's local coordinates.
    // NOTE(review): if index exceeds the total length, no child is found and
    // the text is silently dropped via optional chaining — confirm intended.
    const [child, offset] = this.#findChildByIndex(index);

    child?.insertText(text, index - offset);
  }

  /**
   * Removes text form the specified range
   *
   * @param [start] - start char index of the range, by default 0
   * @param [end] - end char index of the range, by default length of the text value
   * @returns {string} removed text
   */
  public removeText(start = 0, end = this.length): string {
    const result = this.#reduceChildrenInRange(
      start,
      end,
      (acc, child, childStart, childEnd) => {
        return acc + child.removeText(childStart, childEnd);
      },
      ''
    );

    // Once all text is gone this node is empty, so it detaches itself from its parent
    if (this.length === 0) {
      this.remove();
    }

    return result;
  }

  /**
   * Returns text from the specified range
   *
   * @param [start] - start char index of the range, by default 0
   * @param [end] - end char index of the range, by default length of the text value
   */
  public getText(start = 0, end = this.length): string {
    return this.#reduceChildrenInRange(
      start,
      end,
      (acc, child, childStart, childEnd) => {
        return acc + child.getText(childStart, childEnd);
      },
      ''
    );
  }

  /**
   * Returns inline fragments for subtree including current node from the specified range
   *
   * @param [start] - start char index of the range, by default 0
   * @param [end] - end char index of the range, by default length of the text value
   */
  public getFragments(start = 0, end = this.length): InlineFragment[] {
    return this.#reduceChildrenInRange<InlineFragment[]>(
      start,
      end,
      (acc, child, childStart, childEnd) => {
        /**
         * If child is not a FormattingNode, it doesn't include any fragments. So we skip it.
         */
        if (!(child instanceof FormattingNode)) {
          return acc;
        }

        acc.push(...child.getFragments(childStart, childEnd));

        return acc;
      },
      // The accumulator starts with this node's own fragment covering the
      // requested range; nested FormattingNode fragments are appended after it
      [ {
        tool: this.#tool,
        data: this.#data,
        range: [start, end],
      } ]
    );
  }

  /**
   * Splits current node by the specified index
   *
   * @param index - char index where to split the node
   * @returns {FormattingNode | null} new node
   */
  public split(index: number): FormattingNode | null {
    // Splitting at either boundary would produce an empty node — nothing to do
    if (index === 0 || index === this.length) {
      return null;
    }

    // The right-hand part keeps the same tool and data as this node
    const newNode = new FormattingNode({
      tool: this.#tool,
      data: this.#data,
    });
    const [child, offset] = this.#findChildByIndex(index);

    if (!child) {
      return null;
    }

    // Have to save length as it is changed after split
    const childLength = child.length;
    const splitNode = child.split(index - offset);
    // Index of the first child to move into the new (right-hand) node
    let midNodeIndex = this.children.indexOf(child);

    /**
     * If node is split or if node is not split but index equals to child length, we should split children from the next node
     */
    if (splitNode || (index - offset === childLength)) {
      midNodeIndex += 1;
    }

    newNode.append(...this.children.slice(midNodeIndex));
    this.parent?.insertAfter(this, newNode);

    return newNode;
  }

  /**
   * Applies formatting to the text with specified inline tool in the specified range
   *
   * @param tool - name of inline tool to apply
   * @param start - char start index of the range
   * @param end - char end index of the range
   * @param [data] - inline tool data if applicable
   */
  public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {
    /**
     * In case current tool is the same as new one, do nothing
     */
    if (tool === this.#tool) {
      return [];
    }

    return this.#reduceChildrenInRange<InlineNode[]>(
      start,
      end,
      (acc, child, childStart, childEnd) => {
        acc.push(...child.format(tool, childStart, childEnd, data));

        return acc;
      },
      []
    );
  }

  /**
   * Iterates through children in range and calls callback for each
   *
   * @param start - range start char index
   * @param end - range end char index
   * @param callback - callback to apply on children
   * @param initialValue - initial accumulator value
   * @private
   */
  #reduceChildrenInRange<Acc>(
    start: number,
    end: number,
    callback: (acc: Acc, child: InlineNode, start: number, end: number) => Acc,
    initialValue: Acc
  ): Acc {
    let result = initialValue;

    // start/end are shifted by each child's length after every iteration, so
    // the callback always receives the range in the current child's local
    // coordinates; the guard skips children that fall outside the range
    for (const child of this.children) {
      if (start < child.length && end > 0 && start < end) {
        result = callback(result, child, Math.max(start, 0), Math.min(child.length, end));
      }

      start -= child.length;
      end -= child.length;
    }

    return result;
  }

  /**
   * Returns child by passed text index
   *
   * @param index - char index
   * @private
   */
  #findChildByIndex(index: number): [child: InlineNode & ChildNode | null, offset: number] {
    let totalLength = 0;

    // `<=` makes a boundary index (exactly at a child's end) resolve to that
    // child rather than the next one
    for (const child of this.children) {
      if (index <= child.length + totalLength) {
        return [child, totalLength];
      }

      totalLength += child.length;
    }

    return [null, totalLength];
  }
}
| src/entities/FormattingNode/index.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/interfaces/ParentNode.ts",
"retrieved_chunk": " *\n * @param child - child to remove\n */\n public removeChild(child: ChildNode): void {\n const index = this.children.indexOf(child);\n this.children.splice(index, 1);\n child.remove();\n }\n /**\n * Inserts new children after specified target",
"score": 35.38262101863551
},
{
"filename": "src/entities/interfaces/ParentNode.ts",
"retrieved_chunk": " if (index === -1) {\n return;\n }\n this.children.splice(index, 1);\n });\n const index = this.children.indexOf(target);\n this.children.splice(index + 1, 0, ...children);\n children.forEach(child => child.appendTo(this));\n }\n };",
"score": 32.93776582751483
},
{
"filename": "src/entities/interfaces/ParentNode.ts",
"retrieved_chunk": " if (index === -1) {\n return;\n }\n this.children.splice(index, 1);\n });\n this.children.push(...children);\n children.forEach(child => child.appendTo(this));\n }\n /**\n * Removes a child from the parent",
"score": 29.65958990016528
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " expect(childMock.insertText).toBeCalledWith(newText, index);\n });\n it('should adjust index by child offset', () => {\n const offset = childMock.length;\n node.insertText(newText, index + offset);\n expect(anotherChildMock.insertText).toBeCalledWith(newText, index);\n });\n it('should append text to the last child by default', () => {\n node.insertText(newText);\n expect(anotherChildMock.insertText).toBeCalledWith(newText, anotherChildMock.length);",
"score": 29.37334833311151
},
{
"filename": "src/entities/interfaces/ParentNode.ts",
"retrieved_chunk": " * Appends passed children to this node\n *\n * @param children - array of children to append\n */\n public append(...children: ChildNode[]): void {\n /**\n * If node is already a child of current node, remove it to append at the end\n */\n children.forEach(child => {\n const index = this.children.indexOf(child);",
"score": 28.83084982781678
}
] | typescript | let midNodeIndex = this.children.indexOf(child); |
import {
FormattingNodeConstructorParameters,
InlineToolName,
InlineToolData
} from './types';
import { ChildNode, InlineFragment, InlineNode, InlineNodeSerialized, ParentNode } from '../interfaces';
export * from './types';
/**
* We need to extend FormattingNode interface with ChildNode and ParentNode ones to use the methods from mixins
*/
export interface FormattingNode extends ChildNode, ParentNode {}
/**
* FormattingNode class represents a node in a tree-like structure, used to store and manipulate formatted text content
*/
@ParentNode
@ChildNode
export class FormattingNode implements InlineNode {
/**
* Private field representing the name of the formatting tool applied to the content
*/
#tool: InlineToolName;
/**
* Any additional data associated with the formatting tool
*/
#data?: InlineToolData;
/**
* Constructor for FormattingNode class.
*
* @param args - FormattingNode constructor arguments.
* @param args.tool - The name of the formatting tool applied to the content.
* @param args.data - Any additional data associated with the formatting.
*/
constructor({ tool, data }: FormattingNodeConstructorParameters) {
this.#tool = tool;
this.#data = data;
}
/**
* Returns text value length of current node (including subtree)
*/
public get length(): number {
return this.children.reduce((sum, child) => sum + child.length, 0);
}
/**
* Returns serialized value of the node: text and formatting fragments
*/
public get serialized(): InlineNodeSerialized {
return {
text: this.getText(),
fragments: this.getFragments(),
};
}
/**
* Inserts text to the specified index, by default appends text to the end of the current value
*
* @param text - text to insert
* @param [index] - char index where to insert text
*/
public insertText(text: string, index = this.length): void {
const [child, offset] = this.#findChildByIndex(index);
child?.insertText(text, index - offset);
}
/**
* Removes text form the specified range
*
* @param [start] - start char index of the range, by default 0
* @param [end] - end char index of the range, by default length of the text value
* @returns {string} removed text
*/
public removeText(start = 0, end = this.length): string {
const result = this.#reduceChildrenInRange(
start,
end,
(acc, child, childStart, childEnd) => {
return acc + child.removeText(childStart, childEnd);
},
''
);
if (this.length === 0) {
this.remove();
}
return result;
}
/**
* Returns text from the specified range
*
* @param [start] - start char index of the range, by default 0
* @param [end] - end char index of the range, by default length of the text value
*/
public getText(start = 0, end = this.length): string {
return this.#reduceChildrenInRange(
start,
end,
(acc, child, childStart, childEnd) => {
return acc + child.getText(childStart, childEnd);
},
''
);
}
/**
* Returns inline fragments for subtree including current node from the specified range
*
* @param [start] - start char index of the range, by default 0
* @param [end] - end char index of the range, by default length of the text value
*/
public getFragments(start = 0, end = this.length): InlineFragment[] {
return this.#reduceChildrenInRange<InlineFragment[]>(
start,
end,
(acc, child, childStart, childEnd) => {
/**
* If child is not a FormattingNode, it doesn't include any fragments. So we skip it.
*/
if (!(child instanceof FormattingNode)) {
return acc;
}
acc.push(...child.getFragments(childStart, childEnd));
return acc;
},
[ {
tool: this.#tool,
data: this.#data,
range: [start, end],
} ]
);
}
/**
* Splits current node by the specified index
*
* @param index - char index where to split the node
* @returns {FormattingNode | null} new node
*/
public split(index: number): FormattingNode | null {
if (index === 0 || index === this.length) {
return null;
}
const newNode = new FormattingNode({
tool: this.#tool,
data: this.#data,
});
const [child, offset] = this.#findChildByIndex(index);
if (!child) {
return null;
}
// Have to save length as it is changed after split
const childLength = child.length;
const splitNode = child.split(index - offset);
let midNodeIndex = this.children.indexOf(child);
/**
* If node is split or if node is not split but index equals to child length, we should split children from the next node
*/
if (splitNode || (index - offset === childLength)) {
midNodeIndex += 1;
}
newNode.append( | ...this.children.slice(midNodeIndex)); |
this.parent?.insertAfter(this, newNode);
return newNode;
}
/**
* Applies formatting to the text with specified inline tool in the specified range
*
* @param tool - name of inline tool to apply
* @param start - char start index of the range
* @param end - char end index of the range
* @param [data] - inline tool data if applicable
*/
public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {
/**
* In case current tool is the same as new one, do nothing
*/
if (tool === this.#tool) {
return [];
}
return this.#reduceChildrenInRange<InlineNode[]>(
start,
end,
(acc, child, childStart, childEnd) => {
acc.push(...child.format(tool, childStart, childEnd, data));
return acc;
},
[]
);
}
/**
* Iterates through children in range and calls callback for each
*
* @param start - range start char index
* @param end - range end char index
* @param callback - callback to apply on children
* @param initialValue - initial accumulator value
* @private
*/
#reduceChildrenInRange<Acc>(
start: number,
end: number,
callback: (acc: Acc, child: InlineNode, start: number, end: number) => Acc,
initialValue: Acc
): Acc {
let result = initialValue;
for (const child of this.children) {
if (start < child.length && end > 0 && start < end) {
result = callback(result, child, Math.max(start, 0), Math.min(child.length, end));
}
start -= child.length;
end -= child.length;
}
return result;
}
/**
* Returns child by passed text index
*
* @param index - char index
* @private
*/
#findChildByIndex(index: number): [child: InlineNode & ChildNode | null, offset: number] {
let totalLength = 0;
for (const child of this.children) {
if (index <= child.length + totalLength) {
return [child, totalLength];
}
totalLength += child.length;
}
return [null, totalLength];
}
}
| src/entities/FormattingNode/index.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/TextNode/TextNode.spec.ts",
"retrieved_chunk": " });\n describe('.split()', () => {\n const index = 5;\n it('should not split (return null) if index is 0', () => {\n const newNode = node.split(0);\n expect(newNode).toBeNull();\n });\n it('should not split (return null) if index equals text length', () => {\n const newNode = node.split(initialText.length);\n expect(newNode).toBeNull();",
"score": 50.86840038268646
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " it('should not split (return null) if index equals text length', () => {\n const newNode = node.split(node.length);\n expect(newNode).toBeNull();\n });\n it('should create new FormattingNode on split', () => {\n const newNode = node.split(index);\n expect(newNode).toBeInstanceOf(FormattingNode);\n });\n /**\n * @todo check this and related cases with integration tests",
"score": 45.93311537929346
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " */\n it('should create new FormattingNode with children split from the original one', () => {\n const newNode = node.split(childMock.length);\n expect(newNode?.children).toEqual([ anotherChildMock ]);\n });\n it('should call split method of child containing the specified index', () => {\n node.split(index);\n expect(childMock.split).toBeCalledWith(index);\n });\n it('should insert new node to the parent', () => {",
"score": 45.83639608415784
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " },\n ]);\n });\n });\n describe('.split()', () => {\n const index = 5;\n it('should not split (return null) if index is 0', () => {\n const newNode = node.split(0);\n expect(newNode).toBeNull();\n });",
"score": 44.275619703966385
},
{
"filename": "src/entities/TextNode/index.ts",
"retrieved_chunk": " /**\n * Splits current node into two nodes by the specified index\n *\n * @param index - char index where to split\n * @returns {TextNode|null} - new node or null if split is not applicable\n */\n public split(index: number): TextNode | null {\n if (index === 0 || index === this.length) {\n return null;\n }",
"score": 43.2547310983667
}
] | typescript | ...this.children.slice(midNodeIndex)); |
import {
FormattingNodeConstructorParameters,
InlineToolName,
InlineToolData
} from './types';
import { ChildNode, InlineFragment, InlineNode, InlineNodeSerialized, ParentNode } from '../interfaces';
export * from './types';
/**
* We need to extend FormattingNode interface with ChildNode and ParentNode ones to use the methods from mixins
*/
export interface FormattingNode extends ChildNode, ParentNode {}
/**
 * FormattingNode class represents a node in a tree-like structure, used to store and manipulate formatted text content
 */
@ParentNode
@ChildNode
export class FormattingNode implements InlineNode {
  /**
   * Private field representing the name of the formatting tool applied to the content
   */
  #tool: InlineToolName;
  /**
   * Any additional data associated with the formatting tool
   */
  #data?: InlineToolData;
  /**
   * Constructor for FormattingNode class.
   *
   * @param args - FormattingNode constructor arguments.
   * @param args.tool - The name of the formatting tool applied to the content.
   * @param args.data - Any additional data associated with the formatting.
   */
  constructor({ tool, data }: FormattingNodeConstructorParameters) {
    this.#tool = tool;
    this.#data = data;
  }
  /**
   * Returns text value length of current node (including subtree)
   */
  public get length(): number {
    return this.children.reduce((sum, child) => sum + child.length, 0);
  }
  /**
   * Returns serialized value of the node: text and formatting fragments
   */
  public get serialized(): InlineNodeSerialized {
    return {
      text: this.getText(),
      fragments: this.getFragments(),
    };
  }
  /**
   * Inserts text to the specified index, by default appends text to the end of the current value
   *
   * NOTE(review): when no child contains the index (e.g. the node has no children),
   * the optional chaining skips the call and the text is silently dropped —
   * presumably a FormattingNode always has at least one child; confirm against callers.
   *
   * @param text - text to insert
   * @param [index] - char index where to insert text
   */
  public insertText(text: string, index = this.length): void {
    const [child, offset] = this.#findChildByIndex(index);
    // Translate the index into the found child's local coordinates
    child?.insertText(text, index - offset);
  }
  /**
   * Removes text form the specified range
   *
   * @param [start] - start char index of the range, by default 0
   * @param [end] - end char index of the range, by default length of the text value
   * @returns {string} removed text
   */
  public removeText(start = 0, end = this.length): string {
    const result = this.#reduceChildrenInRange(
      start,
      end,
      (acc, child, childStart, childEnd) => {
        return acc + child.removeText(childStart, childEnd);
      },
      ''
    );
    // A node left without any content removes itself from the tree
    if (this.length === 0) {
      this.remove();
    }
    return result;
  }
  /**
   * Returns text from the specified range
   *
   * @param [start] - start char index of the range, by default 0
   * @param [end] - end char index of the range, by default length of the text value
   */
  public getText(start = 0, end = this.length): string {
    return this.#reduceChildrenInRange(
      start,
      end,
      (acc, child, childStart, childEnd) => {
        return acc + child.getText(childStart, childEnd);
      },
      ''
    );
  }
  /**
   * Returns inline fragments for subtree including current node from the specified range
   *
   * NOTE(review): this node's own fragment range is expressed in this node's coordinates,
   * while ranges returned by nested FormattingNode children are in the children's local
   * coordinates — confirm whether callers expect a single coordinate space.
   *
   * @param [start] - start char index of the range, by default 0
   * @param [end] - end char index of the range, by default length of the text value
   */
  public getFragments(start = 0, end = this.length): InlineFragment[] {
    return this.#reduceChildrenInRange<InlineFragment[]>(
      start,
      end,
      (acc, child, childStart, childEnd) => {
        /**
         * If child is not a FormattingNode, it doesn't include any fragments. So we skip it.
         */
        if (!(child instanceof FormattingNode)) {
          return acc;
        }
        acc.push(...child.getFragments(childStart, childEnd));
        return acc;
      },
      // The accumulator is seeded with this node's own fragment
      [ {
        tool: this.#tool,
        data: this.#data,
        range: [start, end],
      } ]
    );
  }
  /**
   * Splits current node by the specified index
   *
   * @param index - char index where to split the node
   * @returns {FormattingNode | null} new node
   */
  public split(index: number): FormattingNode | null {
    // Splitting at either edge would produce an empty node — nothing to do
    if (index === 0 || index === this.length) {
      return null;
    }
    // The new node carries the same tool and data as the original
    const newNode = new FormattingNode({
      tool: this.#tool,
      data: this.#data,
    });
    const [child, offset] = this.#findChildByIndex(index);
    if (!child) {
      return null;
    }
    // Have to save length as it is changed after split
    const childLength = child.length;
    const splitNode = child.split(index - offset);
    // After child.split(), the child keeps its position, so indexOf is still valid here
    let midNodeIndex = this.children.indexOf(child);
    /**
     * If node is split or if node is not split but index equals to child length, we should split children from the next node
     */
    if (splitNode || (index - offset === childLength)) {
      midNodeIndex += 1;
    }
    newNode.append(...this.children.slice(midNodeIndex));
    this.parent?.insertAfter(this, newNode);
    return newNode;
  }
  /**
   * Applies formatting to the text with specified inline tool in the specified range
   *
   * @param tool - name of inline tool to apply
   * @param start - char start index of the range
   * @param end - char end index of the range
   * @param [data] - inline tool data if applicable
   */
  public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {
    /**
     * In case current tool is the same as new one, do nothing
     *
     * NOTE(review): only the tool name is compared — applying the same tool with
     * different data also returns [] without updating #data; confirm intended.
     */
    if (tool === this.#tool) {
      return [];
    }
    return this.#reduceChildrenInRange<InlineNode[]>(
      start,
      end,
      (acc, child, childStart, childEnd) => {
        acc.push(...child.format(tool, childStart, childEnd, data));
        return acc;
      },
      []
    );
  }
  /**
   * Iterates through children in range and calls callback for each
   *
   * Range indices are given in this node's coordinates; each iteration shifts
   * them by the child's length so the callback receives indices in the child's
   * local coordinates, clamped to the child's bounds.
   *
   * @param start - range start char index
   * @param end - range end char index
   * @param callback - callback to apply on children
   * @param initialValue - initial accumulator value
   * @private
   */
  #reduceChildrenInRange<Acc>(
    start: number,
    end: number,
    callback: (acc: Acc, child: InlineNode, start: number, end: number) => Acc,
    initialValue: Acc
  ): Acc {
    let result = initialValue;
    for (const child of this.children) {
      // Process the child only if the (shifted) range overlaps it
      if (start < child.length && end > 0 && start < end) {
        result = callback(result, child, Math.max(start, 0), Math.min(child.length, end));
      }
      // Shift the range into the next child's local coordinates
      start -= child.length;
      end -= child.length;
    }
    return result;
  }
  /**
   * Returns child by passed text index
   *
   * Returns a tuple of the child and the total length of all preceding children
   * (the child's offset). The boundary check is inclusive, so an index right at
   * a child's end resolves to that child.
   *
   * @param index - char index
   * @private
   */
  #findChildByIndex(index: number): [child: InlineNode & ChildNode | null, offset: number] {
    let totalLength = 0;
    for (const child of this.children) {
      if (index <= child.length + totalLength) {
        return [child, totalLength];
      }
      totalLength += child.length;
    }
    return [null, totalLength];
  }
}
| src/entities/FormattingNode/index.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/TextNode/index.ts",
"retrieved_chunk": " * Remove text from specified range\n *\n * @param [start] - start char index of the range, 0 by default\n * @param [end] - end char index of the range, text length by default\n * @returns {string} removed text\n */\n public removeText(start = 0, end = this.length): string {\n this.#validateIndex(start);\n this.#validateIndex(end);\n const removedValue = this.#value.slice(start, end);",
"score": 73.69148449662156
},
{
"filename": "src/entities/TextNode/index.ts",
"retrieved_chunk": " this.#value = this.#value.slice(0, start) + this.#value.slice(end);\n if (this.length === 0) {\n this.remove();\n }\n return removedValue;\n }\n /**\n * Returns text value from the specified range\n *\n * @param [start] - start char index of the range, 0 by default",
"score": 66.48752903485227
},
{
"filename": "src/entities/interfaces/InlineNode.ts",
"retrieved_chunk": " * Returns text value in passed range\n *\n * @param start - start char index of the range\n * @param end - end char index of the range\n */\n getText(start?: number, end?: number): string;\n /**\n * Applies inline formatting on the passed range\n *\n * @param name - name of Inline Tool to apply",
"score": 55.9095416175525
},
{
"filename": "src/entities/TextNode/index.ts",
"retrieved_chunk": " * @param [end] - end char index of the range, text length by default\n */\n public getText(start = 0, end = this.length): string {\n if (start > end) {\n // Stryker disable next-line StringLiteral\n throw new Error(`Start index ${start} should be less or equal than end index ${end}`);\n }\n this.#validateIndex(start);\n this.#validateIndex(end);\n return this.#value.slice(start, end);",
"score": 55.218538273675875
},
{
"filename": "src/entities/TextNode/index.ts",
"retrieved_chunk": " }\n /**\n * Applies inline tool for specified range\n *\n * @param tool - name of the tool to apply\n * @param start - start char index of the range\n * @param end - end char index of the range\n * @param [data] - inline tool data if applicable\n * @returns {InlineNode[]} - array of nodes after applied formatting\n */",
"score": 52.089458103336675
}
] | typescript | public getFragments(start = 0, end = this.length): InlineFragment[] { |
import { describe, it, expect, beforeEach } from '@jest/globals';
import { TextNode } from './index';
import { createInlineToolName, FormattingNode } from '../FormattingNode';
import type { ParentNode } from '../interfaces';
// Unit tests for TextNode: insertion, retrieval, removal, inline formatting, splitting and serialization
describe('TextNode', () => {
  const initialText = 'initial text';
  const text = 'some text';
  // Minimal ParentNode stub — only the members TextNode interacts with are mocked
  const parentMock = {
    insertAfter: jest.fn(),
    removeChild: jest.fn(),
    append: jest.fn(),
    children: [],
  } as unknown as ParentNode;
  let node: TextNode;
  // Fresh node with known content and the shared parent mock before every test
  beforeEach(() => {
    node = new TextNode({
      value: initialText,
      parent: parentMock as FormattingNode,
    });
  });
  it('should have empty value by default', () => {
    node = new TextNode();
    expect(node.getText()).toEqual('');
  });
  describe('.insertText()', () => {
    it('should set text to value if node is empty', () => {
      node = new TextNode();
      node.insertText(text);
      expect(node.getText()).toEqual(text);
    });
    it('should append text if not empty', () => {
      node.insertText(text);
      expect(node.getText()).toEqual(initialText + text);
    });
    it('should prepend text if index is 0 and node is not empty', () => {
      node.insertText(text, 0);
      expect(node.getText()).toEqual(text + initialText);
    });
    it('should insert text at index if not empty', () => {
      const index = 5;
      node.insertText(text, index);
      expect(node.getText()).toEqual(initialText.slice(0, index) + text + initialText.slice(index));
    });
    // Out-of-bounds indices are rejected rather than clamped
    it('should throw an error if index is less than 0', () => {
      const f = (): void => node.insertText(text, -1);
      expect(f).toThrowError();
    });
    it('should throw an error if index is greater than node length', () => {
      const f = (): void => node.insertText(text, initialText.length + 1);
      expect(f).toThrowError();
    });
  });
  describe('.getText()', () => {
    it('should return sliced value if start provided', () => {
      const start = 5;
      expect(node.getText(start)).toEqual(initialText.slice(start));
    });
    it('should return sliced value if end provided', () => {
      const end = 6;
      expect(node.getText(0, end)).toEqual(initialText.slice(0, end));
    });
    it('should return sliced value if full range provided', () => {
      const start = 3;
      const end = 9;
      expect(node.getText(start, end)).toEqual(initialText.slice(start, end));
    });
    it('should throw an error if start is invalid index', () => {
      expect(() => node.getText(-1)).toThrowError();
      expect(() => node.getText(initialText.length + 1)).toThrowError();
    });
    it('should throw an error if end is invalid index', () => {
      expect(() => node.getText(0, initialText.length + 1)).toThrowError();
    });
    it('should throw an error if end index is greater than start index', () => {
      const start = 5;
      const end = 3;
      expect(() => node.getText(start, end)).toThrowError();
    });
    // An empty range (start === end) is valid and yields an empty string
    it('should not throw an error if end index is equal to start index', () => {
      const start = 5;
      const end = 5;
      expect(() => node.getText(start, end)).not.toThrowError();
    });
  });
  describe('.removeText()', () => {
    it('should remove all text by default', () => {
      node.removeText();
      expect(node.getText()).toEqual('');
    });
    it('should remove text from specified index', () => {
      const start = 3;
      node.removeText(start);
      expect(node.getText()).toEqual(initialText.slice(0, start));
    });
    it('should remove text from 0 to specified end index', () => {
      const end = 8;
      node.removeText(0, end);
      expect(node.getText()).toEqual(initialText.slice(end));
    });
    it('should remove text from specified start and end indecies', () => {
      const start = 3;
      const end = 8;
      node.removeText(start, end);
      expect(node.getText()).toEqual(initialText.slice(0, start) + initialText.slice(end));
    });
    // A node emptied by removeText() detaches itself from the tree
    it('should call remove() method if node is empty after removeText() call', () => {
      jest.spyOn(node, 'remove');
      node.removeText();
      expect(node.remove).toBeCalled();
    });
  });
  // format() replaces the node with up to three parts: plain head, formatted middle, plain tail
  describe('.format()', () => {
    it('should return just one FormattingNode, if formatting full TextNode', () => {
      const name = createInlineToolName('bold');
      const fragments = node.format(name, 0, initialText.length);
      expect(fragments).toHaveLength(1);
      expect(fragments[0]).toBeInstanceOf(FormattingNode);
    });
    it('should return two fragments if formatting from the start, but not to the end', () => {
      const name = createInlineToolName('bold');
      const end = 5;
      const fragments = node.format(name, 0, end);
      expect(fragments).toHaveLength(2);
      expect(fragments[0]).toBeInstanceOf(FormattingNode);
      expect(fragments[1]).toBeInstanceOf(TextNode);
    });
    it('should return two fragments if formatting to the end, but not from the start', () => {
      const name = createInlineToolName('bold');
      const start = 5;
      const fragments = node.format(name, start, initialText.length);
      expect(fragments).toHaveLength(2);
      expect(fragments[0]).toBeInstanceOf(TextNode);
      expect(fragments[1]).toBeInstanceOf(FormattingNode);
    });
    it('should return three fragments if formatting in the middle', () => {
      const name = createInlineToolName('bold');
      const start = 5;
      const end = 8;
      const fragments = node.format(name, start, end);
      // eslint-disable-next-line @typescript-eslint/no-magic-numbers
      expect(fragments).toHaveLength(3);
      expect(fragments[0]).toBeInstanceOf(TextNode);
      expect(fragments[1]).toBeInstanceOf(FormattingNode);
      expect(fragments[2]).toBeInstanceOf(TextNode);
    });
    it('should return FormattingNode with a TextNode as a child with correct text value', () => {
      const name = createInlineToolName('bold');
      const start = 5;
      const end = 8;
      const fragments = node.format(name, start, end);
      const formattingNode = fragments[1] as FormattingNode;
      expect(formattingNode.children[0].getText()).toEqual(initialText.slice(start, end));
    });
    it('should call parent\'s insertAfter with new nodes', () => {
      const name = createInlineToolName('bold');
      const start = 5;
      const end = 8;
      const fragments = node.format(name, start, end);
      expect(parentMock.insertAfter).toBeCalledWith(node, ...fragments);
    });
  });
  describe('.split()', () => {
    const index = 5;
    // Splitting at either edge is a no-op that returns null
    it('should not split (return null) if index is 0', () => {
      const newNode = node.split(0);
      expect(newNode).toBeNull();
    });
    it('should not split (return null) if index equals text length', () => {
      const newNode = node.split(initialText.length);
      expect(newNode).toBeNull();
    });
    it('should create new TextNode on split', () => {
      const newNode = node.split(index);
      expect(newNode).toBeInstanceOf(TextNode);
    });
    it('should create new TextNode with text value splitted from the original one', () => {
      const newNode = node.split(index);
      expect(newNode?.getText()).toEqual(initialText.slice(index));
    });
    it('should remove split text value from the original node', () => {
      node.split(index);
      expect(node.getText()).toEqual(initialText.slice(0, index));
    });
    it('should insert new node to the parent', () => {
      const newNode = node.split(index);
      expect(parentMock.insertAfter).toBeCalledWith(node, newNode);
    });
  });
  describe('.serialized', () => {
    it('should return text value and empty array of fragments', () => {
      const result = node.serialized;
      expect(result).toEqual({
        text: initialText,
        fragments: [],
      });
    });
  });
});
| src/entities/TextNode/TextNode.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " const newNode = node.split(index);\n expect(parentMock.insertAfter).toBeCalledWith(node, newNode);\n });\n });\n describe('.format()', () => {\n const start = 3;\n const end = 5;\n it('should apply formatting to the relevant child', () => {\n node.format(anotherTool, start, end);\n expect(childMock.format).toBeCalledWith(anotherTool, start, end, undefined);",
"score": 56.71361939222522
},
{
"filename": "src/entities/TextNode/index.ts",
"retrieved_chunk": " */\n if (start > 0) {\n fragments.push(this.#cloneContents(0, start));\n }\n /**\n * Formatting is applied to the specified range\n */\n const formattedFragment = this.#cloneContents(start, end);\n formattedFragment.appendTo(formattingNode);\n fragments.push(formattingNode);",
"score": 54.57876772811659
},
{
"filename": "src/entities/TextNode/index.ts",
"retrieved_chunk": " public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {\n this.#validateIndex(start);\n this.#validateIndex(end);\n const formattingNode = new FormattingNode({\n tool,\n data,\n });\n const fragments: ChildNode[] = [];\n /**\n * If start index is greater than 0, we need extract part of the text before the start index",
"score": 49.98876540903258
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " });\n it('should return array of new formatting nodes', () => {\n const result = node.format(anotherTool, start, end);\n expect(result).toEqual(childMock.format(anotherTool, start, end));\n });\n });\n});",
"score": 48.084270541472605
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " removeSpy.mockRestore();\n lengthSpy.mockRestore();\n });\n });\n describe('.getText()', () => {\n const start = 3;\n const end = 5;\n it('should call getText() for the relevant child', () => {\n node.getText(start, end);\n expect(childMock.getText).toBeCalledWith(start, end);",
"score": 46.53276019382335
}
] | typescript | expect(parentMock.insertAfter).toBeCalledWith(node, ...fragments); |
import {
FormattingNodeConstructorParameters,
InlineToolName,
InlineToolData
} from './types';
import { ChildNode, InlineFragment, InlineNode, InlineNodeSerialized, ParentNode } from '../interfaces';
export * from './types';
/**
* We need to extend FormattingNode interface with ChildNode and ParentNode ones to use the methods from mixins
*/
export interface FormattingNode extends ChildNode, ParentNode {}
/**
* FormattingNode class represents a node in a tree-like structure, used to store and manipulate formatted text content
*/
@ParentNode
@ChildNode
export class FormattingNode implements InlineNode {
/**
* Private field representing the name of the formatting tool applied to the content
*/
#tool: InlineToolName;
/**
* Any additional data associated with the formatting tool
*/
#data?: InlineToolData;
/**
* Constructor for FormattingNode class.
*
* @param args - FormattingNode constructor arguments.
* @param args.tool - The name of the formatting tool applied to the content.
* @param args.data - Any additional data associated with the formatting.
*/
constructor({ tool, data }: FormattingNodeConstructorParameters) {
this.#tool = tool;
this.#data = data;
}
/**
* Returns text value length of current node (including subtree)
*/
public get length(): number {
return this.children.reduce((sum, child) => sum + child.length, 0);
}
/**
* Returns serialized value of the node: text and formatting fragments
*/
public get serialized(): InlineNodeSerialized {
return {
text: this.getText(),
fragments: this.getFragments(),
};
}
/**
* Inserts text to the specified index, by default appends text to the end of the current value
*
* @param text - text to insert
* @param [index] - char index where to insert text
*/
public insertText(text: string, index = this.length): void {
const [child, offset] = this.#findChildByIndex(index);
child?.insertText(text, index - offset);
}
/**
* Removes text form the specified range
*
* @param [start] - start char index of the range, by default 0
* @param [end] - end char index of the range, by default length of the text value
* @returns {string} removed text
*/
public removeText(start = 0, end = this.length): string {
const result = this.#reduceChildrenInRange(
start,
end,
(acc, child, childStart, childEnd) => {
return acc + child.removeText(childStart, childEnd);
},
''
);
if (this.length === 0) {
this.remove();
}
return result;
}
/**
* Returns text from the specified range
*
* @param [start] - start char index of the range, by default 0
* @param [end] - end char index of the range, by default length of the text value
*/
public getText(start = 0, end = this.length): string {
return this.#reduceChildrenInRange(
start,
end,
(acc, child, childStart, childEnd) => {
return acc + child.getText(childStart, childEnd);
},
''
);
}
/**
* Returns inline fragments for subtree including current node from the specified range
*
* @param [start] - start char index of the range, by default 0
* @param [end] - end char index of the range, by default length of the text value
*/
public getFragments(start = 0, end = this.length): InlineFragment[] {
return this.#reduceChildrenInRange<InlineFragment[]>(
start,
end,
(acc, child, childStart, childEnd) => {
/**
* If child is not a FormattingNode, it doesn't include any fragments. So we skip it.
*/
if (!(child instanceof FormattingNode)) {
return acc;
}
acc.push(...child.getFragments(childStart, childEnd));
return acc;
},
[ {
tool: this.#tool,
data: this.#data,
range: [start, end],
} ]
);
}
/**
* Splits current node by the specified index
*
* @param index - char index where to split the node
* @returns {FormattingNode | null} new node
*/
public split(index: number): FormattingNode | null {
if (index === 0 || index === this.length) {
return null;
}
const newNode = new FormattingNode({
tool: this.#tool,
data: this.#data,
});
const [child, offset] = this.#findChildByIndex(index);
if (!child) {
return null;
}
// Have to save length as it is changed after split
const childLength = child.length;
const splitNode = child.split(index - offset);
let midNodeIndex = this.children.indexOf(child);
/**
* If node is split or if node is not split but index equals to child length, we should split children from the next node
*/
if (splitNode || (index - offset === childLength)) {
midNodeIndex += 1;
}
newNode.append(...this.children.slice(midNodeIndex));
this.parent?.insertAfter(this, newNode);
return newNode;
}
/**
* Applies formatting to the text with specified inline tool in the specified range
*
* @param tool - name of inline tool to apply
* @param start - char start index of the range
* @param end - char end index of the range
* @param [data] - inline tool data if applicable
*/
public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {
/**
* In case current tool is the same as new one, do nothing
*/
if (tool === this.#tool) {
return [];
}
return this.#reduceChildrenInRange<InlineNode[]>(
start,
end,
(acc, child, childStart, childEnd) => {
acc.push(...child.format(tool, childStart, childEnd, data));
return acc;
},
[]
);
}
/**
* Iterates through children in range and calls callback for each
*
* @param start - range start char index
* @param end - range end char index
* @param callback - callback to apply on children
* @param initialValue - initial accumulator value
* @private
*/
#reduceChildrenInRange<Acc>(
start: number,
end: number,
callback: (acc: Acc, child: InlineNode, start: number, end: number) => Acc,
initialValue: Acc
): Acc {
let result = initialValue;
for (const child of this.children) {
if ( | start < child.length && end > 0 && start < end) { |
result = callback(result, child, Math.max(start, 0), Math.min(child.length, end));
}
start -= child.length;
end -= child.length;
}
return result;
}
/**
* Returns child by passed text index
*
* @param index - char index
* @private
*/
#findChildByIndex(index: number): [child: InlineNode & ChildNode | null, offset: number] {
let totalLength = 0;
for (const child of this.children) {
if (index <= child.length + totalLength) {
return [child, totalLength];
}
totalLength += child.length;
}
return [null, totalLength];
}
}
| src/entities/FormattingNode/index.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/TextNode/index.ts",
"retrieved_chunk": " *\n * @param start - start char index of the range\n * @param end - end char index of the range\n */\n #cloneContents(start: number, end: number): TextNode {\n return new TextNode({\n value: this.getText(start, end),\n });\n }\n}",
"score": 41.10763827841306
},
{
"filename": "src/entities/TextNode/index.ts",
"retrieved_chunk": " public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {\n this.#validateIndex(start);\n this.#validateIndex(end);\n const formattingNode = new FormattingNode({\n tool,\n data,\n });\n const fragments: ChildNode[] = [];\n /**\n * If start index is greater than 0, we need extract part of the text before the start index",
"score": 38.6005882626302
},
{
"filename": "src/entities/interfaces/InlineNode.ts",
"retrieved_chunk": " * @param [start] - start char index of the range\n * @param [end] - end char index of the range\n * @param [data] - Inline Tool data if applicable\n * @returns {InlineNode[]} - array of nodes after applied formatting\n */\n format(name: InlineToolName, start?: number, end?: number, data?: InlineToolData): InlineNode[];\n /**\n * Inserts text at passed char index\n *\n * @param text - text to insert",
"score": 36.25998578728295
},
{
"filename": "src/entities/interfaces/InlineNode.ts",
"retrieved_chunk": " tool: InlineToolName;\n /**\n * Inline Tool Data if applicable\n */\n data?: InlineToolData;\n /**\n * Range of the fragment\n */\n range: [start: number, end: number];\n}",
"score": 35.37480316742605
},
{
"filename": "src/entities/interfaces/InlineNode.ts",
"retrieved_chunk": " * Returns text value in passed range\n *\n * @param start - start char index of the range\n * @param end - end char index of the range\n */\n getText(start?: number, end?: number): string;\n /**\n * Applies inline formatting on the passed range\n *\n * @param name - name of Inline Tool to apply",
"score": 34.00811904656855
}
] | typescript | start < child.length && end > 0 && start < end) { |
import { FormattingNode, InlineToolName, InlineToolData } from '../FormattingNode';
import { TextNodeConstructorParameters } from './types';
import { ChildNode, InlineNode, InlineNodeSerialized } from '../interfaces';
export * from './types';
export interface TextNode extends ChildNode {}
/**
* TextNode class represents a node in a tree-like structure, used to store and manipulate text content.
*/
@ChildNode
export class TextNode implements InlineNode {
/**
* Private field representing the text content of the node
*/
#value: string;
/**
* Constructor for TextNode class
*
* @param args - TextNode constructor arguments.
* @param args.value - Text content of the node.
*/
constructor({ value = '' }: TextNodeConstructorParameters = {}) {
this.#value = value;
}
/**
* Returns length of the text
*/
public get length(): number {
return this.#value.length;
}
/**
* Returns serialized value of the node
*/
public get serialized(): InlineNodeSerialized {
return {
text: this.getText(),
// No fragments for text node
fragments: [],
};
}
/**
* Inserts text to specified position. By default, appends new text to the current value
*
* @param text - text to insert
* @param [index] - char start index
*/
public insertText(text: string, index = this.length): void {
this.#validateIndex(index);
this.#value = this.#value.slice(0, index) + text + this.#value.slice(index);
}
/**
* Remove text from specified range
*
* @param [start] - start char index of the range, 0 by default
* @param [end] - end char index of the range, text length by default
* @returns {string} removed text
*/
public removeText(start = 0, end = this.length): string {
this.#validateIndex(start);
this.#validateIndex(end);
const removedValue = this.#value.slice(start, end);
this.#value = this.#value.slice(0, start) + this.#value.slice(end);
if (this.length === 0) {
this.remove();
}
return removedValue;
}
/**
* Returns text value from the specified range
*
* @param [start] - start char index of the range, 0 by default
* @param [end] - end char index of the range, text length by default
*/
public getText(start = 0, end = this.length): string {
if (start > end) {
// Stryker disable next-line StringLiteral
throw new Error(`Start index ${start} should be less or equal than end index ${end}`);
}
this.#validateIndex(start);
this.#validateIndex(end);
return this.#value.slice(start, end);
}
/**
* Applies inline tool for specified range
*
* @param tool - name of the tool to apply
* @param start - start char index of the range
* @param end - end char index of the range
* @param [data] - inline tool data if applicable
* @returns {InlineNode[]} - array of nodes after applied formatting
*/
public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {
this.#validateIndex(start);
this.#validateIndex(end);
| const formattingNode = new FormattingNode({ |
tool,
data,
});
const fragments: ChildNode[] = [];
/**
* If start index is greater than 0, we need extract part of the text before the start index
*/
if (start > 0) {
fragments.push(this.#cloneContents(0, start));
}
/**
* Formatting is applied to the specified range
*/
const formattedFragment = this.#cloneContents(start, end);
formattedFragment.appendTo(formattingNode);
fragments.push(formattingNode);
/**
* If end index is less than the text length, we need to extract part of the text after the end index
*/
if (end < this.length) {
fragments.push(this.#cloneContents(end, this.length));
}
this.parent?.insertAfter(this, ...fragments);
this.remove();
return fragments;
}
/**
* Splits current node into two nodes by the specified index
*
* @param index - char index where to split
* @returns {TextNode|null} - new node or null if split is not applicable
*/
public split(index: number): TextNode | null {
if (index === 0 || index === this.length) {
return null;
}
const newNode = new TextNode();
const text = this.removeText(index);
newNode.insertText(text);
this.parent?.insertAfter(this, newNode);
return newNode;
}
/**
* Validates index
*
* @param index - char index to validate
* @throws Error if index is out of the text length
*/
#validateIndex(index: number): void {
if (index < 0 || index > this.length) {
// Stryker disable next-line StringLiteral
throw new Error(`Index ${index} is not in valid range [0, ${this.length}]`);
}
}
/**
* Clones specified range to a new TextNode
*
* @param start - start char index of the range
* @param end - end char index of the range
*/
#cloneContents(start: number, end: number): TextNode {
return new TextNode({
value: this.getText(start, end),
});
}
}
| src/entities/TextNode/index.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/FormattingNode/index.ts",
"retrieved_chunk": " * @param tool - name of inline tool to apply\n * @param start - char start index of the range\n * @param end - char end index of the range\n * @param [data] - inline tool data if applicable\n */\n public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {\n /**\n * In case current tool is the same as new one, do nothing\n */\n if (tool === this.#tool) {",
"score": 102.98566255799211
},
{
"filename": "src/entities/interfaces/InlineNode.ts",
"retrieved_chunk": " * @param [start] - start char index of the range\n * @param [end] - end char index of the range\n * @param [data] - Inline Tool data if applicable\n * @returns {InlineNode[]} - array of nodes after applied formatting\n */\n format(name: InlineToolName, start?: number, end?: number, data?: InlineToolData): InlineNode[];\n /**\n * Inserts text at passed char index\n *\n * @param text - text to insert",
"score": 94.94496977483193
},
{
"filename": "src/entities/interfaces/InlineNode.ts",
"retrieved_chunk": " * Returns text value in passed range\n *\n * @param start - start char index of the range\n * @param end - end char index of the range\n */\n getText(start?: number, end?: number): string;\n /**\n * Applies inline formatting on the passed range\n *\n * @param name - name of Inline Tool to apply",
"score": 70.19013315142624
},
{
"filename": "src/entities/interfaces/InlineNode.ts",
"retrieved_chunk": " tool: InlineToolName;\n /**\n * Inline Tool Data if applicable\n */\n data?: InlineToolData;\n /**\n * Range of the fragment\n */\n range: [start: number, end: number];\n}",
"score": 66.20334798064289
},
{
"filename": "src/entities/interfaces/InlineNode.ts",
"retrieved_chunk": " * @param [index] - char index where to insert text\n */\n insertText(text: string, index?: number): void;\n /**\n * Removes text from the passed range\n *\n * @param [start] - start char index of the range\n * @param [end] - утв char index of the range\n * @returns {string} removed text\n */",
"score": 56.502072246178074
}
] | typescript | const formattingNode = new FormattingNode({ |
import { EditorDocument } from '../EditorDocument';
import { BlockTune, BlockTuneName, BlockTuneSerialized } from '../BlockTune';
import {
BlockNodeConstructorParameters,
BlockNodeName,
createBlockNodeName,
DataKey,
createDataKey, BlockNodeData,
BlockNodeSerialized
} from './types';
/**
* BlockNode class represents a node in a tree-like structure used to store and manipulate Blocks in an editor document.
* A BlockNode can contain one or more child nodes of type TextNode, ValueNode or FormattingNode.
* It can also be associated with one or more BlockTunes, which can modify the behavior of the BlockNode.
*/
export class BlockNode {
/**
* Field representing a name of the Tool created this Block
*/
#name: BlockNodeName;
/**
* Field representing the content of the Block
*/
#data: BlockNodeData;
/**
* Field representing the parent EditorDocument of the BlockNode
*/
#parent: EditorDocument;
/**
* Private field representing the BlockTunes associated with the BlockNode
*/
#tunes: Record<BlockTuneName, BlockTune>;
/**
* Constructor for BlockNode class.
*
* @param args - TextNode constructor arguments.
* @param args.name - The name of the BlockNode.
* @param args.data - The content of the BlockNode.
* @param args.parent - The parent EditorDocument of the BlockNode.
* @param args.tunes - The BlockTunes associated with the BlockNode.
*/
constructor({ name, data, parent, tunes = {} }: BlockNodeConstructorParameters) {
this.#name = name;
this.#data = data;
this.#parent = parent;
this.#tunes = tunes;
}
/**
* Returns serialized object representing the BlockNode
*/
public get serialized(): BlockNodeSerialized {
const serializedData = Object
.entries(this.#data)
.reduce(
(acc, [dataKey, value]) => {
/**
* If the value is an array, we need to serialize each node in the array
* Value is an array if the BlockNode contains TextNodes and FormattingNodes
* After serializing there will be InlineNodeSerialized object
*/
if (value instanceof Array) {
acc[dataKey] = value.map((node) => node.serialized);
return acc;
}
acc[dataKey] = value.serialized;
return acc;
},
{} as Record<string, unknown>
);
const serializedTunes = Object
.entries(this.#tunes)
.reduce(
(acc, [name, tune]) => {
acc[name] = tune.serialized;
return acc;
},
| {} as Record<string, BlockTuneSerialized>
); |
return {
name: this.#name,
data: serializedData,
tunes: serializedTunes,
};
}
}
export {
BlockNodeName,
createBlockNodeName,
DataKey,
createDataKey
};
| src/entities/BlockNode/index.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/BlockNode/BlockNode.spec.ts",
"retrieved_chunk": " createBlockTuneName('align'),\n createBlockTuneName('font-size'),\n createBlockTuneName('font-weight'),\n ];\n const blockTunes = blockTunesNames.reduce((acc, name) => ({\n ...acc,\n [name]: new BlockTune({} as BlockTuneConstructorParameters),\n }), {});\n const spyArray = Object\n .values(blockTunes)",
"score": 33.30014296881142
},
{
"filename": "src/entities/BlockTune/types/BlockTuneSerialized.ts",
"retrieved_chunk": "/**\n * BlockTuneSerialized represents a serialized version of a BlockTune.\n */\nexport interface BlockTuneSerialized {\n /**\n * The name of the tune.\n * Serialized as a string.\n */\n name: string;\n /**",
"score": 26.76837894597172
},
{
"filename": "src/entities/FormattingNode/index.ts",
"retrieved_chunk": " return [];\n }\n return this.#reduceChildrenInRange<InlineNode[]>(\n start,\n end,\n (acc, child, childStart, childEnd) => {\n acc.push(...child.format(tool, childStart, childEnd, data));\n return acc;\n },\n []",
"score": 21.175772522381926
},
{
"filename": "src/entities/BlockTune/types/BlockTuneConstructorParameters.ts",
"retrieved_chunk": "import { BlockTuneName } from './BlockTuneName';\nexport interface BlockTuneConstructorParameters {\n /**\n * The name of the tune\n */\n name: BlockTuneName;\n /**\n * Any additional data associated with the tune\n */\n data: Record<string, unknown>;",
"score": 20.703069473990997
},
{
"filename": "src/entities/BlockNode/BlockNode.spec.ts",
"retrieved_chunk": " spyArray.forEach((spy) => {\n expect(spy).toHaveBeenCalled();\n });\n });\n it('should call .serialized getter of all child ValueNodes associated with the BlockNode', () => {\n const countOfValueNodes = 2;\n const valueNodes = [ ...Array(countOfValueNodes).keys() ]\n .reduce((acc, index) => ({\n ...acc,\n [createDataKey(`data-key-${index}c${index}d`)]: new ValueNode({} as ValueNodeConstructorParameters),",
"score": 20.64791867222776
}
] | typescript | {} as Record<string, BlockTuneSerialized>
); |
import { FormattingNode, InlineToolName, InlineToolData } from '../FormattingNode';
import { TextNodeConstructorParameters } from './types';
import { ChildNode, InlineNode, InlineNodeSerialized } from '../interfaces';
export * from './types';
export interface TextNode extends ChildNode {}
/**
* TextNode class represents a node in a tree-like structure, used to store and manipulate text content.
*/
@ChildNode
export class TextNode implements InlineNode {
/**
* Private field representing the text content of the node
*/
#value: string;
/**
* Constructor for TextNode class
*
* @param args - TextNode constructor arguments.
* @param args.value - Text content of the node.
*/
constructor({ value = '' }: TextNodeConstructorParameters = {}) {
this.#value = value;
}
/**
* Returns length of the text
*/
public get length(): number {
return this.#value.length;
}
/**
* Returns serialized value of the node
*/
public get serialized(): InlineNodeSerialized {
return {
text: this.getText(),
// No fragments for text node
fragments: [],
};
}
/**
* Inserts text to specified position. By default, appends new text to the current value
*
* @param text - text to insert
* @param [index] - char start index
*/
public insertText(text: string, index = this.length): void {
this.#validateIndex(index);
this.#value = this.#value.slice(0, index) + text + this.#value.slice(index);
}
/**
* Remove text from specified range
*
* @param [start] - start char index of the range, 0 by default
* @param [end] - end char index of the range, text length by default
* @returns {string} removed text
*/
public removeText(start = 0, end = this.length): string {
this.#validateIndex(start);
this.#validateIndex(end);
const removedValue = this.#value.slice(start, end);
this.#value = this.#value.slice(0, start) + this.#value.slice(end);
if (this.length === 0) {
this.remove();
}
return removedValue;
}
/**
* Returns text value from the specified range
*
* @param [start] - start char index of the range, 0 by default
* @param [end] - end char index of the range, text length by default
*/
public getText(start = 0, end = this.length): string {
if (start > end) {
// Stryker disable next-line StringLiteral
throw new Error(`Start index ${start} should be less or equal than end index ${end}`);
}
this.#validateIndex(start);
this.#validateIndex(end);
return this.#value.slice(start, end);
}
/**
* Applies inline tool for specified range
*
* @param tool - name of the tool to apply
* @param start - start char index of the range
* @param end - end char index of the range
* @param [data] - inline tool data if applicable
* @returns {InlineNode[]} - array of nodes after applied formatting
*/
public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {
this.#validateIndex(start);
this.#validateIndex(end);
const formattingNode = new FormattingNode({
tool,
data,
});
const fragments: ChildNode[] = [];
/**
* If start index is greater than 0, we need extract part of the text before the start index
*/
if (start > 0) {
fragments.push(this.#cloneContents(0, start));
}
/**
* Formatting is applied to the specified range
*/
const formattedFragment = this.#cloneContents(start, end);
formattedFragment.appendTo(formattingNode);
fragments.push(formattingNode);
/**
* If end index is less than the text length, we need to extract part of the text after the end index
*/
if (end < this.length) {
fragments.push(this.#cloneContents(end, this.length));
}
this | .parent?.insertAfter(this, ...fragments); |
this.remove();
return fragments;
}
/**
* Splits current node into two nodes by the specified index
*
* @param index - char index where to split
* @returns {TextNode|null} - new node or null if split is not applicable
*/
public split(index: number): TextNode | null {
if (index === 0 || index === this.length) {
return null;
}
const newNode = new TextNode();
const text = this.removeText(index);
newNode.insertText(text);
this.parent?.insertAfter(this, newNode);
return newNode;
}
/**
* Validates index
*
* @param index - char index to validate
* @throws Error if index is out of the text length
*/
#validateIndex(index: number): void {
if (index < 0 || index > this.length) {
// Stryker disable next-line StringLiteral
throw new Error(`Index ${index} is not in valid range [0, ${this.length}]`);
}
}
/**
* Clones specified range to a new TextNode
*
* @param start - start char index of the range
* @param end - end char index of the range
*/
#cloneContents(start: number, end: number): TextNode {
return new TextNode({
value: this.getText(start, end),
});
}
}
| src/entities/TextNode/index.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/TextNode/TextNode.spec.ts",
"retrieved_chunk": " const formattingNode = fragments[1] as FormattingNode;\n expect(formattingNode.children[0].getText()).toEqual(initialText.slice(start, end));\n });\n it('should call parent\\'s insertAfter with new nodes', () => {\n const name = createInlineToolName('bold');\n const start = 5;\n const end = 8;\n const fragments = node.format(name, start, end);\n expect(parentMock.insertAfter).toBeCalledWith(node, ...fragments);\n });",
"score": 37.61206687801001
},
{
"filename": "src/entities/FormattingNode/index.ts",
"retrieved_chunk": " return this.children.reduce((sum, child) => sum + child.length, 0);\n }\n /**\n * Returns serialized value of the node: text and formatting fragments\n */\n public get serialized(): InlineNodeSerialized {\n return {\n text: this.getText(),\n fragments: this.getFragments(),\n };",
"score": 33.71818176312534
},
{
"filename": "src/entities/FormattingNode/index.ts",
"retrieved_chunk": " * If child is not a FormattingNode, it doesn't include any fragments. So we skip it.\n */\n if (!(child instanceof FormattingNode)) {\n return acc;\n }\n acc.push(...child.getFragments(childStart, childEnd));\n return acc;\n },\n [ {\n tool: this.#tool,",
"score": 31.964758427156145
},
{
"filename": "src/entities/FormattingNode/index.ts",
"retrieved_chunk": " }\n /**\n * Removes text form the specified range\n *\n * @param [start] - start char index of the range, by default 0\n * @param [end] - end char index of the range, by default length of the text value\n * @returns {string} removed text\n */\n public removeText(start = 0, end = this.length): string {\n const result = this.#reduceChildrenInRange(",
"score": 31.682679647454986
},
{
"filename": "src/entities/FormattingNode/index.ts",
"retrieved_chunk": " return result;\n }\n /**\n * Returns text from the specified range\n *\n * @param [start] - start char index of the range, by default 0\n * @param [end] - end char index of the range, by default length of the text value\n */\n public getText(start = 0, end = this.length): string {\n return this.#reduceChildrenInRange(",
"score": 31.52460514406073
}
] | typescript | .parent?.insertAfter(this, ...fragments); |
import { BlockTuneConstructorParameters, BlockTuneName, BlockTuneSerialized, createBlockTuneName } from './types';
/**
* BlockTune class represents a set of additional information associated with a BlockNode.
* This information can be used by a BlockTool to modify the behavior of the BlockNode.
*/
export class BlockTune {
/**
* Private field representing the name of the tune
*/
#name: BlockTuneName;
/**
* Private field representing any additional data associated with the tune
*/
#data: Record<string, unknown>;
/**
* Constructor for BlockTune class.
*
* @param args - BlockTune constructor arguments.
* @param args.name - The name of the tune.
* @param args.data - Any additional data associated with the tune.
*/
constructor({ name, data }: BlockTuneConstructorParameters) {
this.#name = name;
this.#data = data;
}
/**
* Updates data associated with the tune.
*
* @param key - The key of the data to update
* @param value - The value to update the data with
*/
public update(key: string, value: unknown): void {
this.#data[key] = value;
}
/**
* Returns serialized version of the BlockTune.
*/
public get serialized( | ): BlockTuneSerialized { |
return {
name: this.#name,
data: this.#data,
};
}
}
export {
BlockTuneName,
createBlockTuneName
};
export type {
BlockTuneSerialized
};
| src/entities/BlockTune/index.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/ValueNode/index.ts",
"retrieved_chunk": " /**\n * Updates the data associated with this value node.\n *\n * @param value - The new value of this value node.\n */\n public update(value: ValueType): void {\n this.#value = value;\n }\n /**\n * Returns serialized data associated with this value node.",
"score": 45.659705763293886
},
{
"filename": "src/entities/BlockNode/types/BlockNodeSerialized.ts",
"retrieved_chunk": " * The content of the Block\n */\n data: Record<string, unknown>; // @todo replace unknown type with serialized root node and value node\n /**\n * Serialized BlockTunes associated with the BlockNode\n */\n tunes: Record<string, BlockTuneSerialized>;\n}",
"score": 29.63997595318409
},
{
"filename": "src/entities/BlockTune/BlockTune.spec.ts",
"retrieved_chunk": " blockTune.update('align', 'left');\n // Assert\n expect(blockTune.serialized.data).toEqual({\n align: 'left',\n });\n });\n it('should update field in data object by key', () => {\n // Arrange\n const blockTune = createBlockTuneMock({\n data: {",
"score": 28.533878747776203
},
{
"filename": "src/entities/TextNode/index.ts",
"retrieved_chunk": " * Returns serialized value of the node\n */\n public get serialized(): InlineNodeSerialized {\n return {\n text: this.getText(),\n // No fragments for text node\n fragments: [],\n };\n }\n /**",
"score": 27.573093788894017
},
{
"filename": "src/entities/ValueNode/ValueNode.spec.ts",
"retrieved_chunk": " longitudeValueNode.update(updatedLongitude);\n // Assert\n expect(longitudeValueNode.serialized).toBe(updatedLongitude);\n });\n });\n describe('.serialized', () => {\n it('should return the serialized data associated with this value node', () => {\n // Arrange\n const longitude = 23.123;\n const longitudeValueNode = createValueNodeMock({",
"score": 26.79332866657188
}
] | typescript | ): BlockTuneSerialized { |
import { BlockNode } from './index';
import { createBlockNodeName, createDataKey } from './types';
import { BlockTune, createBlockTuneName } from '../BlockTune';
import { TextNode } from '../TextNode';
import { ValueNode } from '../ValueNode';
import type { EditorDocument } from '../EditorDocument';
import type { BlockTuneConstructorParameters } from '../BlockTune/types';
import type { TextNodeConstructorParameters } from '../TextNode';
import type { ValueNodeConstructorParameters } from '../ValueNode';
describe('BlockNode', () => {
describe('.serialized', () => {
beforeEach(() => {
jest.mock('../BlockTune', () => ({
BlockTune: jest.fn().mockImplementation(() => ({}) as BlockTune),
serialized: jest.fn(),
}));
jest.mock('../TextNode', () => ({
TextNode: jest.fn().mockImplementation(() => ({}) as TextNode),
serialized: jest.fn(),
}));
jest.mock('../ValueNode', () => ({
ValueNode: jest.fn().mockImplementation(() => ({}) as ValueNode),
serialized: jest.fn(),
}));
});
afterEach(() => {
jest.clearAllMocks();
});
it('should return a name of a tool that created a BlockNode', () => {
const blockNodeName = createBlockNodeName('paragraph');
const blockNode = new BlockNode({
name: blockNodeName,
data: {},
parent: {} as EditorDocument,
});
const serialized = blockNode.serialized;
expect(serialized.name).toEqual(blockNodeName);
});
it('should call .serialized getter of all tunes associated with the BlockNode', () => {
const blockTunesNames = [
createBlockTuneName('align'),
createBlockTuneName('font-size'),
createBlockTuneName('font-weight'),
];
const blockTunes = blockTunesNames.reduce((acc, name) => ({
...acc,
[name]: new BlockTune({} as BlockTuneConstructorParameters),
}), {});
const spyArray = Object
.values(blockTunes)
.map((blockTune) => {
return jest.spyOn(blockTune as BlockTune, 'serialized', 'get');
});
const blockNode = new BlockNode({
name: createBlockNodeName('paragraph'),
data: {},
parent: {} as EditorDocument,
tunes: blockTunes,
});
blockNode.serialized;
spyArray.forEach((spy) => {
expect(spy).toHaveBeenCalled();
});
});
it('should call .serialized getter of all child ValueNodes associated with the BlockNode', () => {
const countOfValueNodes = 2;
const valueNodes = [ ...Array(countOfValueNodes).keys() ]
.reduce((acc, index) => ({
...acc,
[ | createDataKey(`data-key-${index}c${index}d`)]: new ValueNode({} as ValueNodeConstructorParameters),
}), {}); |
const spyArray = Object
.values(valueNodes)
.map((valueNode) => {
return jest.spyOn(valueNode as ValueNode, 'serialized', 'get');
});
const blockNode = new BlockNode({
name: createBlockNodeName('paragraph'),
data: {
...valueNodes,
},
parent: {} as EditorDocument,
});
blockNode.serialized;
spyArray.forEach((spy) => {
expect(spy).toHaveBeenCalled();
});
});
it('should call .serialized getter of all child TextNodes associated with the BlockNode', () => {
const countOfTextNodes = 3;
const textNodes = [ ...Array(countOfTextNodes).keys() ]
.map(() => new TextNode({} as TextNodeConstructorParameters));
const spyArray = textNodes
.map((textNode) => {
return jest.spyOn(textNode, 'serialized', 'get');
});
const blockNode = new BlockNode({
name: createBlockNodeName('paragraph'),
data: {
[createDataKey('data-key-1a2b')]: textNodes,
},
parent: {} as EditorDocument,
});
blockNode.serialized;
spyArray.forEach((spy) => {
expect(spy).toHaveBeenCalled();
});
});
});
});
| src/entities/BlockNode/BlockNode.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/BlockNode/index.ts",
"retrieved_chunk": " if (value instanceof Array) {\n acc[dataKey] = value.map((node) => node.serialized);\n return acc;\n }\n acc[dataKey] = value.serialized;\n return acc;\n },\n {} as Record<string, unknown>\n );\n const serializedTunes = Object",
"score": 24.630792472648107
},
{
"filename": "src/entities/BlockNode/index.ts",
"retrieved_chunk": " .entries(this.#tunes)\n .reduce(\n (acc, [name, tune]) => {\n acc[name] = tune.serialized;\n return acc;\n },\n {} as Record<string, BlockTuneSerialized>\n );\n return {\n name: this.#name,",
"score": 22.572997485534355
},
{
"filename": "src/entities/ValueNode/index.ts",
"retrieved_chunk": "import type { ValueNodeConstructorParameters } from './types';\n/**\n * ValueNode class represents a node in a tree-like structure, used to store and manipulate data associated with a BlockNode.\n * Unlike TextNode, changing the data of a ValueNode will replace the entire data in this node.\n * This can be useful for storing data that needs to be updated in its entirety, such as a link or other metadata associated with a BlockNode.\n */\nexport class ValueNode<ValueType = unknown> {\n /**\n * Private field representing the data associated with this node\n */",
"score": 17.794458051455653
},
{
"filename": "src/entities/FormattingNode/index.ts",
"retrieved_chunk": " return [];\n }\n return this.#reduceChildrenInRange<InlineNode[]>(\n start,\n end,\n (acc, child, childStart, childEnd) => {\n acc.push(...child.format(tool, childStart, childEnd, data));\n return acc;\n },\n []",
"score": 15.690400029421978
},
{
"filename": "src/entities/BlockNode/index.ts",
"retrieved_chunk": " public get serialized(): BlockNodeSerialized {\n const serializedData = Object\n .entries(this.#data)\n .reduce(\n (acc, [dataKey, value]) => {\n /**\n * If the value is an array, we need to serialize each node in the array\n * Value is an array if the BlockNode contains TextNodes and FormattingNodes\n * After serializing there will be InlineNodeSerialized object\n */",
"score": 14.424830895467284
}
] | typescript | createDataKey(`data-key-${index}c${index}d`)]: new ValueNode({} as ValueNodeConstructorParameters),
}), {}); |
import { EditorDocument } from '../EditorDocument';
import { BlockTune, BlockTuneName, BlockTuneSerialized } from '../BlockTune';
import {
BlockNodeConstructorParameters,
BlockNodeName,
createBlockNodeName,
DataKey,
createDataKey, BlockNodeData,
BlockNodeSerialized
} from './types';
/**
* BlockNode class represents a node in a tree-like structure used to store and manipulate Blocks in an editor document.
* A BlockNode can contain one or more child nodes of type TextNode, ValueNode or FormattingNode.
* It can also be associated with one or more BlockTunes, which can modify the behavior of the BlockNode.
*/
export class BlockNode {
/**
* Field representing a name of the Tool created this Block
*/
#name: BlockNodeName;
/**
* Field representing the content of the Block
*/
#data: BlockNodeData;
/**
* Field representing the parent EditorDocument of the BlockNode
*/
#parent: EditorDocument;
/**
* Private field representing the BlockTunes associated with the BlockNode
*/
#tunes: Record<BlockTuneName, BlockTune>;
/**
* Constructor for BlockNode class.
*
* @param args - TextNode constructor arguments.
* @param args.name - The name of the BlockNode.
* @param args.data - The content of the BlockNode.
* @param args.parent - The parent EditorDocument of the BlockNode.
* @param args.tunes - The BlockTunes associated with the BlockNode.
*/
constructor({ name, data, parent, tunes = {} }: BlockNodeConstructorParameters) {
this.#name = name;
this.#data = data;
this.#parent = parent;
this.#tunes = tunes;
}
/**
* Returns serialized object representing the BlockNode
*/
public get serialized(): BlockNodeSerialized {
const serializedData = Object
.entries(this.#data)
.reduce(
(acc, [dataKey, value]) => {
/**
* If the value is an array, we need to serialize each node in the array
* Value is an array if the BlockNode contains TextNodes and FormattingNodes
* After serializing there will be InlineNodeSerialized object
*/
if (value instanceof Array) {
acc[dataKey] = value.map((node) => node.serialized);
return acc;
}
acc[dataKey] = value.serialized;
return acc;
},
{} as Record<string, unknown>
);
const serializedTunes = Object
.entries(this.#tunes)
.reduce(
(acc, [name, tune]) => {
| acc[name] = tune.serialized; |
return acc;
},
{} as Record<string, BlockTuneSerialized>
);
return {
name: this.#name,
data: serializedData,
tunes: serializedTunes,
};
}
}
export {
BlockNodeName,
createBlockNodeName,
DataKey,
createDataKey
};
| src/entities/BlockNode/index.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/BlockNode/BlockNode.spec.ts",
"retrieved_chunk": " createBlockTuneName('align'),\n createBlockTuneName('font-size'),\n createBlockTuneName('font-weight'),\n ];\n const blockTunes = blockTunesNames.reduce((acc, name) => ({\n ...acc,\n [name]: new BlockTune({} as BlockTuneConstructorParameters),\n }), {});\n const spyArray = Object\n .values(blockTunes)",
"score": 38.39840779588387
},
{
"filename": "src/entities/FormattingNode/index.ts",
"retrieved_chunk": " return [];\n }\n return this.#reduceChildrenInRange<InlineNode[]>(\n start,\n end,\n (acc, child, childStart, childEnd) => {\n acc.push(...child.format(tool, childStart, childEnd, data));\n return acc;\n },\n []",
"score": 27.222521034208462
},
{
"filename": "src/entities/BlockNode/BlockNode.spec.ts",
"retrieved_chunk": " spyArray.forEach((spy) => {\n expect(spy).toHaveBeenCalled();\n });\n });\n it('should call .serialized getter of all child ValueNodes associated with the BlockNode', () => {\n const countOfValueNodes = 2;\n const valueNodes = [ ...Array(countOfValueNodes).keys() ]\n .reduce((acc, index) => ({\n ...acc,\n [createDataKey(`data-key-${index}c${index}d`)]: new ValueNode({} as ValueNodeConstructorParameters),",
"score": 26.800131578852163
},
{
"filename": "src/entities/FormattingNode/index.ts",
"retrieved_chunk": " start,\n end,\n (acc, child, childStart, childEnd) => {\n return acc + child.removeText(childStart, childEnd);\n },\n ''\n );\n if (this.length === 0) {\n this.remove();\n }",
"score": 25.06750732164301
},
{
"filename": "src/entities/FormattingNode/index.ts",
"retrieved_chunk": " * If child is not a FormattingNode, it doesn't include any fragments. So we skip it.\n */\n if (!(child instanceof FormattingNode)) {\n return acc;\n }\n acc.push(...child.getFragments(childStart, childEnd));\n return acc;\n },\n [ {\n tool: this.#tool,",
"score": 24.181437446466663
}
] | typescript | acc[name] = tune.serialized; |
import { FormattingNode, InlineToolName, InlineToolData } from '../FormattingNode';
import { TextNodeConstructorParameters } from './types';
import { ChildNode, InlineNode, InlineNodeSerialized } from '../interfaces';
export * from './types';
export interface TextNode extends ChildNode {}
/**
* TextNode class represents a node in a tree-like structure, used to store and manipulate text content.
*/
@ChildNode
export class TextNode implements InlineNode {
/**
* Private field representing the text content of the node
*/
#value: string;
/**
* Constructor for TextNode class
*
* @param args - TextNode constructor arguments.
* @param args.value - Text content of the node.
*/
constructor({ value = '' }: TextNodeConstructorParameters = {}) {
this.#value = value;
}
/**
* Returns length of the text
*/
public get length(): number {
return this.#value.length;
}
/**
* Returns serialized value of the node
*/
public get serialized(): InlineNodeSerialized {
return {
text: this.getText(),
// No fragments for text node
fragments: [],
};
}
/**
* Inserts text to specified position. By default, appends new text to the current value
*
* @param text - text to insert
* @param [index] - char start index
*/
public insertText(text: string, index = this.length): void {
this.#validateIndex(index);
this.#value = this.#value.slice(0, index) + text + this.#value.slice(index);
}
/**
* Remove text from specified range
*
* @param [start] - start char index of the range, 0 by default
* @param [end] - end char index of the range, text length by default
* @returns {string} removed text
*/
public removeText(start = 0, end = this.length): string {
this.#validateIndex(start);
this.#validateIndex(end);
const removedValue = this.#value.slice(start, end);
this.#value = this.#value.slice(0, start) + this.#value.slice(end);
if (this.length === 0) {
this.remove();
}
return removedValue;
}
/**
* Returns text value from the specified range
*
* @param [start] - start char index of the range, 0 by default
* @param [end] - end char index of the range, text length by default
*/
public getText(start = 0, end = this.length): string {
if (start > end) {
// Stryker disable next-line StringLiteral
throw new Error(`Start index ${start} should be less or equal than end index ${end}`);
}
this.#validateIndex(start);
this.#validateIndex(end);
return this.#value.slice(start, end);
}
/**
* Applies inline tool for specified range
*
* @param tool - name of the tool to apply
* @param start - start char index of the range
* @param end - end char index of the range
* @param [data] - inline tool data if applicable
* @returns {InlineNode[]} - array of nodes after applied formatting
*/
public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {
this.#validateIndex(start);
this.#validateIndex(end);
const formattingNode = new FormattingNode({
tool,
data,
});
const fragments: ChildNode[] = [];
/**
* If start index is greater than 0, we need extract part of the text before the start index
*/
if (start > 0) {
fragments.push(this.#cloneContents(0, start));
}
/**
* Formatting is applied to the specified range
*/
const formattedFragment = this.#cloneContents(start, end);
formattedFragment. | appendTo(formattingNode); |
fragments.push(formattingNode);
/**
* If end index is less than the text length, we need to extract part of the text after the end index
*/
if (end < this.length) {
fragments.push(this.#cloneContents(end, this.length));
}
this.parent?.insertAfter(this, ...fragments);
this.remove();
return fragments;
}
/**
* Splits current node into two nodes by the specified index
*
* @param index - char index where to split
* @returns {TextNode|null} - new node or null if split is not applicable
*/
public split(index: number): TextNode | null {
if (index === 0 || index === this.length) {
return null;
}
const newNode = new TextNode();
const text = this.removeText(index);
newNode.insertText(text);
this.parent?.insertAfter(this, newNode);
return newNode;
}
/**
* Validates index
*
* @param index - char index to validate
* @throws Error if index is out of the text length
*/
#validateIndex(index: number): void {
if (index < 0 || index > this.length) {
// Stryker disable next-line StringLiteral
throw new Error(`Index ${index} is not in valid range [0, ${this.length}]`);
}
}
/**
* Clones specified range to a new TextNode
*
* @param start - start char index of the range
* @param end - end char index of the range
*/
#cloneContents(start: number, end: number): TextNode {
return new TextNode({
value: this.getText(start, end),
});
}
}
| src/entities/TextNode/index.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/FormattingNode/index.ts",
"retrieved_chunk": " return result;\n }\n /**\n * Returns text from the specified range\n *\n * @param [start] - start char index of the range, by default 0\n * @param [end] - end char index of the range, by default length of the text value\n */\n public getText(start = 0, end = this.length): string {\n return this.#reduceChildrenInRange(",
"score": 23.490025082411492
},
{
"filename": "src/entities/FormattingNode/index.ts",
"retrieved_chunk": " }\n /**\n * Removes text form the specified range\n *\n * @param [start] - start char index of the range, by default 0\n * @param [end] - end char index of the range, by default length of the text value\n * @returns {string} removed text\n */\n public removeText(start = 0, end = this.length): string {\n const result = this.#reduceChildrenInRange(",
"score": 23.04668347809475
},
{
"filename": "src/entities/TextNode/TextNode.spec.ts",
"retrieved_chunk": " const formattingNode = fragments[1] as FormattingNode;\n expect(formattingNode.children[0].getText()).toEqual(initialText.slice(start, end));\n });\n it('should call parent\\'s insertAfter with new nodes', () => {\n const name = createInlineToolName('bold');\n const start = 5;\n const end = 8;\n const fragments = node.format(name, start, end);\n expect(parentMock.insertAfter).toBeCalledWith(node, ...fragments);\n });",
"score": 22.737249067138215
},
{
"filename": "src/entities/FormattingNode/index.ts",
"retrieved_chunk": " *\n * @param [start] - start char index of the range, by default 0\n * @param [end] - end char index of the range, by default length of the text value\n */\n public getFragments(start = 0, end = this.length): InlineFragment[] {\n return this.#reduceChildrenInRange<InlineFragment[]>(\n start,\n end,\n (acc, child, childStart, childEnd) => {\n /**",
"score": 21.920004428957007
},
{
"filename": "src/entities/TextNode/TextNode.spec.ts",
"retrieved_chunk": " const end = 5;\n const fragments = node.format(name, 0, end);\n expect(fragments).toHaveLength(2);\n expect(fragments[0]).toBeInstanceOf(FormattingNode);\n expect(fragments[1]).toBeInstanceOf(TextNode);\n });\n it('should return two fragments if formatting to the end, but not from the start', () => {\n const name = createInlineToolName('bold');\n const start = 5;\n const fragments = node.format(name, start, initialText.length);",
"score": 20.182571446010424
}
] | typescript | appendTo(formattingNode); |
import isGlob from 'is-glob'
import micromatch from 'micromatch'
import type { IsTargetFilterPath } from './interfaces/pathFilter'
import { ERRORS } from './errors'
// A "string path" is a plain (non-glob) string filter
const isStringPath = (pathFilter?: string | string[]): pathFilter is string => {
  if (typeof pathFilter !== 'string') return false

  return !isGlob(pathFilter)
}
// A "glob path" is a string filter containing glob syntax
const isGlobPath = (pattern?: string | string[]): pattern is string => {
  if (typeof pattern !== 'string') return false

  return isGlob(pattern)
}
// A "multi path" filter is an array of string/glob filters
const isMultiPath = (
  pathFilter?: string | string[]
): pathFilter is string[] => Array.isArray(pathFilter)
/**
 * Checks whether the pathname contains the given plain-string filter.
 *
 * @param pathname - request path to test
 * @param pathFilter - plain (non-glob) substring to look for
 * @returns true when the filter is a non-empty string contained in pathname
 */
const matchSingleStringPath = (
  pathname: string,
  pathFilter?: string
): boolean => {
  if (!pathFilter) return false
  // `includes` is the idiomatic form of `indexOf(...) >= 0`
  return pathname.includes(pathFilter)
}
// True when at least one plain-string filter in the list matches the pathname
const matchMultiPath = (pathname: string, pathFilterList: string[]) => {
  for (const filter of pathFilterList) {
    if (matchSingleStringPath(pathname, filter)) {
      return true
    }
  }

  return false
}
// True when the pathname matches the given glob pattern(s) via micromatch
const matchSingleGlobPath = (
  pathname: string,
  pattern?: string | string[]
): boolean => {
  if (!pattern) return false

  return micromatch([pathname], pattern).length > 0
}
// micromatch accepts an array of patterns natively, so delegate as-is
const matchMultiGlobPath = (pathname: string, patterns?: string | string[]) =>
  matchSingleGlobPath(pathname, patterns)
/**
* checkout weather the path is target filter path
*/
const isTargetFilterPath: IsTargetFilterPath = (
pathname = '',
{ pathFilter, req }
) => {
// custom path filter
if (typeof pathFilter === 'function') {
return pathFilter(pathname, req)
}
// single glob
if (isGlobPath(pathFilter)) {
return matchSingleGlobPath(pathname, pathFilter)
}
// single string
if (isStringPath(pathFilter)) {
return matchSingleStringPath(pathname, pathFilter)
}
// multi path
if (isMultiPath(pathFilter)) {
if (pathFilter.every(isStringPath)) {
return matchMultiPath(pathname, pathFilter)
}
if ((pathFilter as string[]).every(isGlobPath)) {
return matchMultiGlobPath(pathname, pathFilter)
}
throw | new Error(ERRORS.ERR_CONTEXT_MATCHER_INVALID_ARRAY)
} |
return true
}
export { isTargetFilterPath }
| src/pathFilter.ts | yisibell-h3-proxy-46e4021 | [
{
"filename": "src/index.ts",
"retrieved_chunk": " const { target, pathFilter, pathRewrite, enableLogger, loggerOptions } =\n finalOptions\n if (!target) {\n throw new Error(ERRORS.ERR_CONFIG_FACTORY_TARGET_MISSING)\n }\n const logger = createLogger({\n enableLogger,\n loggerOptions,\n })\n return async (event) => {",
"score": 16.34689554892268
},
{
"filename": "src/interfaces/pathFilter.ts",
"retrieved_chunk": "import type { IncomingMessage } from 'http'\nexport type CustomPathFilter = (\n pathname: string,\n req: IncomingMessage\n) => boolean\nexport type PathFilterParams = string | string[] | CustomPathFilter\nexport type IsTargetFilterPath = (\n pathname: string | undefined,\n opts: {\n pathFilter?: PathFilterParams",
"score": 12.67508105191613
},
{
"filename": "src/pathRewriter.ts",
"retrieved_chunk": " if (typeof rewriteConfig === 'function') {\n return true\n } else if (isPlainObj(rewriteConfig)) {\n return Object.keys(rewriteConfig as object).length !== 0\n } else if (rewriteConfig === undefined || rewriteConfig === null) {\n return false\n } else {\n throw new Error(ERRORS.ERR_PATH_REWRITER_CONFIG)\n }\n}",
"score": 11.847542906020411
},
{
"filename": "src/urlParser.ts",
"retrieved_chunk": "import { URL } from 'node:url'\nconst parseUrlToObject = (url: string) => {\n return new URL(url)\n}\nconst getUrlPath = (url?: string, base?: string) => {\n if (!url) return ''\n const { pathname, search } = new URL(url, base)\n return `${pathname}${search}`\n}\nexport { parseUrlToObject, getUrlPath }",
"score": 10.679619778382264
},
{
"filename": "src/errors.ts",
"retrieved_chunk": "export enum ERRORS {\n ERR_CONFIG_FACTORY_TARGET_MISSING = '[h3-proxy] Missing \"target\" option. Example: {target: \"http://www.example.org\"}',\n ERR_CONTEXT_MATCHER_INVALID_ARRAY = '[h3-proxy] Invalid pathFilter. Expecting something like: [\"/api\", \"/ajax\"] or [\"/api/**\", \"!**.html\"]',\n ERR_PATH_REWRITER_CONFIG = '[h3-proxy] Invalid pathRewrite config. Expecting object with pathRewrite config or a rewrite function',\n}",
"score": 7.777031679349145
}
] | typescript | new Error(ERRORS.ERR_CONTEXT_MATCHER_INVALID_ARRAY)
} |
import { BlockNode } from './index';
import { createBlockNodeName, createDataKey } from './types';
import { BlockTune, createBlockTuneName } from '../BlockTune';
import { TextNode } from '../TextNode';
import { ValueNode } from '../ValueNode';
import type { EditorDocument } from '../EditorDocument';
import type { BlockTuneConstructorParameters } from '../BlockTune/types';
import type { TextNodeConstructorParameters } from '../TextNode';
import type { ValueNodeConstructorParameters } from '../ValueNode';
// Tests for BlockNode serialization: BlockNode.serialized must delegate to
// the .serialized getters of its tunes and of its child Value/Text nodes.
describe('BlockNode', () => {
  describe('.serialized', () => {
    beforeEach(() => {
      // NOTE(review): jest.mock() is normally hoisted to module scope;
      // calling it inside beforeEach may not re-register these factories —
      // confirm the stubs are actually in effect for the tests below.
      jest.mock('../BlockTune', () => ({
        BlockTune: jest.fn().mockImplementation(() => ({}) as BlockTune),
        serialized: jest.fn(),
      }));
      jest.mock('../TextNode', () => ({
        TextNode: jest.fn().mockImplementation(() => ({}) as TextNode),
        serialized: jest.fn(),
      }));
      jest.mock('../ValueNode', () => ({
        ValueNode: jest.fn().mockImplementation(() => ({}) as ValueNode),
        serialized: jest.fn(),
      }));
    });
    afterEach(() => {
      // Reset call counters on all mocks/spies between tests
      jest.clearAllMocks();
    });
    it('should return a name of a tool that created a BlockNode', () => {
      const blockNodeName = createBlockNodeName('paragraph');
      const blockNode = new BlockNode({
        name: blockNodeName,
        data: {},
        parent: {} as EditorDocument,
      });
      const serialized = blockNode.serialized;
      expect(serialized.name).toEqual(blockNodeName);
    });
    it('should call .serialized getter of all tunes associated with the BlockNode', () => {
      const blockTunesNames = [
        createBlockTuneName('align'),
        createBlockTuneName('font-size'),
        createBlockTuneName('font-weight'),
      ];
      // Build a { tuneName: BlockTune } record from the names above
      const blockTunes = blockTunesNames.reduce((acc, name) => ({
        ...acc,
        [name]: new BlockTune({} as BlockTuneConstructorParameters),
      }), {});
      // Spy on the .serialized getter of every tune instance
      const spyArray = Object
        .values(blockTunes)
        .map((blockTune) => {
          return jest.spyOn(blockTune as BlockTune, 'serialized', 'get');
        });
      const blockNode = new BlockNode({
        name: createBlockNodeName('paragraph'),
        data: {},
        parent: {} as EditorDocument,
        tunes: blockTunes,
      });
      // Trigger serialization for its side effect on the spies
      blockNode.serialized;
      spyArray.forEach((spy) => {
        expect(spy).toHaveBeenCalled();
      });
    });
    it('should call .serialized getter of all child ValueNodes associated with the BlockNode', () => {
      const countOfValueNodes = 2;
      // Build a { dataKey: ValueNode } record with generated keys
      const valueNodes = [ ...Array(countOfValueNodes).keys() ]
        .reduce((acc, index) => ({
          ...acc,
          [createDataKey(`data-key-${index}c${index}d`)]: new ValueNode({} as ValueNodeConstructorParameters),
        }), {});
      // Spy on the .serialized getter of every value node
      const spyArray = Object
        .values(valueNodes)
        .map((valueNode) => {
          return jest.spyOn(valueNode as ValueNode, 'serialized', 'get');
        });
      const blockNode = new BlockNode({
        name: createBlockNodeName('paragraph'),
        data: {
          ...valueNodes,
        },
        parent: {} as EditorDocument,
      });
      // Trigger serialization for its side effect on the spies
      blockNode.serialized;
      spyArray.forEach((spy) => {
        expect(spy).toHaveBeenCalled();
      });
    });
    it('should call .serialized getter of all child TextNodes associated with the BlockNode', () => {
      const countOfTextNodes = 3;
      const textNodes = [ ...Array(countOfTextNodes).keys() ]
        .map(() => new TextNode({} as TextNodeConstructorParameters));
      // Spy on the .serialized getter of every text node
      const spyArray = textNodes
        .map((textNode) => {
          return jest.spyOn(textNode, 'serialized', 'get');
        });
      const blockNode = new BlockNode({
        name: createBlockNodeName('paragraph'),
        data: {
          // An array under a data key models mixed inline content
          [createDataKey('data-key-1a2b')]: textNodes,
        },
        parent: {} as EditorDocument,
      });
      // Trigger serialization for its side effect on the spies
      blockNode.serialized;
      spyArray.forEach((spy) => {
        expect(spy).toHaveBeenCalled();
      });
    });
  });
});
| src/entities/BlockNode/BlockNode.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/BlockNode/index.ts",
"retrieved_chunk": " .entries(this.#tunes)\n .reduce(\n (acc, [name, tune]) => {\n acc[name] = tune.serialized;\n return acc;\n },\n {} as Record<string, BlockTuneSerialized>\n );\n return {\n name: this.#name,",
"score": 34.58343997936057
},
{
"filename": "src/entities/BlockNode/index.ts",
"retrieved_chunk": " if (value instanceof Array) {\n acc[dataKey] = value.map((node) => node.serialized);\n return acc;\n }\n acc[dataKey] = value.serialized;\n return acc;\n },\n {} as Record<string, unknown>\n );\n const serializedTunes = Object",
"score": 34.04617642043353
},
{
"filename": "src/entities/BlockTune/types/BlockTuneSerialized.ts",
"retrieved_chunk": "/**\n * BlockTuneSerialized represents a serialized version of a BlockTune.\n */\nexport interface BlockTuneSerialized {\n /**\n * The name of the tune.\n * Serialized as a string.\n */\n name: string;\n /**",
"score": 21.091852780542542
},
{
"filename": "src/entities/BlockTune/index.ts",
"retrieved_chunk": " */\n public update(key: string, value: unknown): void {\n this.#data[key] = value;\n }\n /**\n * Returns serialized version of the BlockTune.\n */\n public get serialized(): BlockTuneSerialized {\n return {\n name: this.#name,",
"score": 19.652901729409603
},
{
"filename": "src/utils/mocks/createBlockTuneMock.ts",
"retrieved_chunk": " data?: Record<string, unknown>,\n}): BlockTune {\n return new BlockTune(\n {\n name: name || createBlockTuneName('aligning'),\n data: data || {},\n }\n );\n}",
"score": 18.67901641021891
}
] | typescript | blockTune as BlockTune, 'serialized', 'get'); |
import isGlob from 'is-glob'
import micromatch from 'micromatch'
import type { IsTargetFilterPath } from './interfaces/pathFilter'
import { ERRORS } from './errors'
// A "string path" is a plain (non-glob) string filter
const isStringPath = (pathFilter?: string | string[]): pathFilter is string => {
  if (typeof pathFilter !== 'string') return false

  return !isGlob(pathFilter)
}
// A "glob path" is a string filter containing glob syntax
const isGlobPath = (pattern?: string | string[]): pattern is string => {
  if (typeof pattern !== 'string') return false

  return isGlob(pattern)
}
// A "multi path" filter is an array of string/glob filters
const isMultiPath = (
  pathFilter?: string | string[]
): pathFilter is string[] => Array.isArray(pathFilter)
/**
 * Checks whether the pathname contains the given plain-string filter.
 *
 * @param pathname - request path to test
 * @param pathFilter - plain (non-glob) substring to look for
 * @returns true when the filter is a non-empty string contained in pathname
 */
const matchSingleStringPath = (
  pathname: string,
  pathFilter?: string
): boolean => {
  if (!pathFilter) return false
  // `includes` is the idiomatic form of `indexOf(...) >= 0`
  return pathname.includes(pathFilter)
}
// True when at least one plain-string filter in the list matches the pathname
const matchMultiPath = (pathname: string, pathFilterList: string[]) => {
  for (const filter of pathFilterList) {
    if (matchSingleStringPath(pathname, filter)) {
      return true
    }
  }

  return false
}
// True when the pathname matches the given glob pattern(s) via micromatch
const matchSingleGlobPath = (
  pathname: string,
  pattern?: string | string[]
): boolean => {
  if (!pattern) return false

  return micromatch([pathname], pattern).length > 0
}
// micromatch accepts an array of patterns natively, so delegate as-is
const matchMultiGlobPath = (pathname: string, patterns?: string | string[]) =>
  matchSingleGlobPath(pathname, patterns)
/**
* checkout weather the path is target filter path
*/
| const isTargetFilterPath: IsTargetFilterPath = (
pathname = '',
{ pathFilter, req } |
) => {
// custom path filter
if (typeof pathFilter === 'function') {
return pathFilter(pathname, req)
}
// single glob
if (isGlobPath(pathFilter)) {
return matchSingleGlobPath(pathname, pathFilter)
}
// single string
if (isStringPath(pathFilter)) {
return matchSingleStringPath(pathname, pathFilter)
}
// multi path
if (isMultiPath(pathFilter)) {
if (pathFilter.every(isStringPath)) {
return matchMultiPath(pathname, pathFilter)
}
if ((pathFilter as string[]).every(isGlobPath)) {
return matchMultiGlobPath(pathname, pathFilter)
}
throw new Error(ERRORS.ERR_CONTEXT_MATCHER_INVALID_ARRAY)
}
return true
}
export { isTargetFilterPath }
| src/pathFilter.ts | yisibell-h3-proxy-46e4021 | [
{
"filename": "src/interfaces/pathFilter.ts",
"retrieved_chunk": "import type { IncomingMessage } from 'http'\nexport type CustomPathFilter = (\n pathname: string,\n req: IncomingMessage\n) => boolean\nexport type PathFilterParams = string | string[] | CustomPathFilter\nexport type IsTargetFilterPath = (\n pathname: string | undefined,\n opts: {\n pathFilter?: PathFilterParams",
"score": 18.638069391088464
},
{
"filename": "src/urlParser.ts",
"retrieved_chunk": "import { URL } from 'node:url'\nconst parseUrlToObject = (url: string) => {\n return new URL(url)\n}\nconst getUrlPath = (url?: string, base?: string) => {\n if (!url) return ''\n const { pathname, search } = new URL(url, base)\n return `${pathname}${search}`\n}\nexport { parseUrlToObject, getUrlPath }",
"score": 12.53132078738406
},
{
"filename": "src/index.ts",
"retrieved_chunk": " const { req } = event.node\n const path = getUrlPath(req.url, target)\n // generate proxy request options via default strategy\n const proxyRequestOptions = createProxyRequestOptions(event, finalOptions)\n if (isTargetFilterPath(path, { pathFilter, req })) {\n const pathRewriter = createPathRewriter(pathRewrite, logger)\n let rewritedPath = path\n if (pathRewriter) {\n rewritedPath = await pathRewriter(path, req)\n }",
"score": 12.010482372287331
},
{
"filename": "src/interfaces/pathRewriter.ts",
"retrieved_chunk": "import type { IncomingMessage } from 'http'\nimport type { Logger } from './logger'\nexport type CustomPathRewriter = (\n pathname: string,\n req: IncomingMessage\n) => string | Promise<string>\nexport type RewriteRecord = Record<string, string>\nexport type PathRewriterParams = RewriteRecord | CustomPathRewriter\nexport type CreatePathRewriter = (\n pathRewrite?: PathRewriterParams,",
"score": 11.508481888621192
},
{
"filename": "src/pathRewriter.ts",
"retrieved_chunk": " let rulesCache: RewriteRule[]\n function rewritePath(path: string) {\n let result = path\n for (const rule of rulesCache) {\n if (rule.regex.test(path)) {\n result = result.replace(rule.regex, rule.value)\n logger && logger.info('rewriting path from \"%s\" to \"%s\"', path, result)\n break\n }\n }",
"score": 9.144165051847127
}
] | typescript | const isTargetFilterPath: IsTargetFilterPath = (
pathname = '',
{ pathFilter, req } |
import { BlockNode } from './index';
import { createBlockNodeName, createDataKey } from './types';
import { BlockTune, createBlockTuneName } from '../BlockTune';
import { TextNode } from '../TextNode';
import { ValueNode } from '../ValueNode';
import type { EditorDocument } from '../EditorDocument';
import type { BlockTuneConstructorParameters } from '../BlockTune/types';
import type { TextNodeConstructorParameters } from '../TextNode';
import type { ValueNodeConstructorParameters } from '../ValueNode';
// Tests for BlockNode serialization: BlockNode.serialized must delegate to
// the .serialized getters of its tunes and of its child Value/Text nodes.
describe('BlockNode', () => {
  describe('.serialized', () => {
    beforeEach(() => {
      // NOTE(review): jest.mock() is normally hoisted to module scope;
      // calling it inside beforeEach may not re-register these factories —
      // confirm the stubs are actually in effect for the tests below.
      jest.mock('../BlockTune', () => ({
        BlockTune: jest.fn().mockImplementation(() => ({}) as BlockTune),
        serialized: jest.fn(),
      }));
      jest.mock('../TextNode', () => ({
        TextNode: jest.fn().mockImplementation(() => ({}) as TextNode),
        serialized: jest.fn(),
      }));
      jest.mock('../ValueNode', () => ({
        ValueNode: jest.fn().mockImplementation(() => ({}) as ValueNode),
        serialized: jest.fn(),
      }));
    });
    afterEach(() => {
      // Reset call counters on all mocks/spies between tests
      jest.clearAllMocks();
    });
    it('should return a name of a tool that created a BlockNode', () => {
      const blockNodeName = createBlockNodeName('paragraph');
      const blockNode = new BlockNode({
        name: blockNodeName,
        data: {},
        parent: {} as EditorDocument,
      });
      const serialized = blockNode.serialized;
      expect(serialized.name).toEqual(blockNodeName);
    });
    it('should call .serialized getter of all tunes associated with the BlockNode', () => {
      const blockTunesNames = [
        createBlockTuneName('align'),
        createBlockTuneName('font-size'),
        createBlockTuneName('font-weight'),
      ];
      // Build a { tuneName: BlockTune } record from the names above
      const blockTunes = blockTunesNames.reduce((acc, name) => ({
        ...acc,
        [name]: new BlockTune({} as BlockTuneConstructorParameters),
      }), {});
      // Spy on the .serialized getter of every tune instance
      const spyArray = Object
        .values(blockTunes)
        .map((blockTune) => {
          return jest.spyOn(blockTune as BlockTune, 'serialized', 'get');
        });
      const blockNode = new BlockNode({
        name: createBlockNodeName('paragraph'),
        data: {},
        parent: {} as EditorDocument,
        tunes: blockTunes,
      });
      // Trigger serialization for its side effect on the spies
      blockNode.serialized;
      spyArray.forEach((spy) => {
        expect(spy).toHaveBeenCalled();
      });
    });
    it('should call .serialized getter of all child ValueNodes associated with the BlockNode', () => {
      const countOfValueNodes = 2;
      // Build a { dataKey: ValueNode } record with generated keys
      const valueNodes = [ ...Array(countOfValueNodes).keys() ]
        .reduce((acc, index) => ({
          ...acc,
          [createDataKey(`data-key-${index}c${index}d`)]: new ValueNode({} as ValueNodeConstructorParameters),
        }), {});
      // Spy on the .serialized getter of every value node
      const spyArray = Object
        .values(valueNodes)
        .map((valueNode) => {
          return jest.spyOn(valueNode as ValueNode, 'serialized', 'get');
        });
      const blockNode = new BlockNode({
        name: createBlockNodeName('paragraph'),
        data: {
          ...valueNodes,
        },
        parent: {} as EditorDocument,
      });
      // Trigger serialization for its side effect on the spies
      blockNode.serialized;
      spyArray.forEach((spy) => {
        expect(spy).toHaveBeenCalled();
      });
    });
    it('should call .serialized getter of all child TextNodes associated with the BlockNode', () => {
      const countOfTextNodes = 3;
      const textNodes = [ ...Array(countOfTextNodes).keys() ]
        .map(() => new TextNode({} as TextNodeConstructorParameters));
      // Spy on the .serialized getter of every text node
      const spyArray = textNodes
        .map((textNode) => {
          return jest.spyOn(textNode, 'serialized', 'get');
        });
      const blockNode = new BlockNode({
        name: createBlockNodeName('paragraph'),
        data: {
          // An array under a data key models mixed inline content
          [createDataKey('data-key-1a2b')]: textNodes,
        },
        parent: {} as EditorDocument,
      });
      // Trigger serialization for its side effect on the spies
      blockNode.serialized;
      spyArray.forEach((spy) => {
        expect(spy).toHaveBeenCalled();
      });
    });
  });
});
| src/entities/BlockNode/BlockNode.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/BlockNode/index.ts",
"retrieved_chunk": " if (value instanceof Array) {\n acc[dataKey] = value.map((node) => node.serialized);\n return acc;\n }\n acc[dataKey] = value.serialized;\n return acc;\n },\n {} as Record<string, unknown>\n );\n const serializedTunes = Object",
"score": 21.643261644313686
},
{
"filename": "src/entities/ValueNode/ValueNode.spec.ts",
"retrieved_chunk": " longitudeValueNode.update(updatedLongitude);\n // Assert\n expect(longitudeValueNode.serialized).toBe(updatedLongitude);\n });\n });\n describe('.serialized', () => {\n it('should return the serialized data associated with this value node', () => {\n // Arrange\n const longitude = 23.123;\n const longitudeValueNode = createValueNodeMock({",
"score": 14.974237391098567
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " describe('.serialized', () => {\n it('should return concatenated text of all fragments with fragments list describing formatting', () => {\n const result = node.serialized;\n expect(result).toEqual({\n text: childMock.getText() + anotherChildMock.getText(),\n fragments: [\n {\n tool,\n data,\n range: [0, node.length],",
"score": 14.499459894455152
},
{
"filename": "src/entities/TextNode/TextNode.spec.ts",
"retrieved_chunk": " expect(fragments).toHaveLength(3);\n expect(fragments[0]).toBeInstanceOf(TextNode);\n expect(fragments[1]).toBeInstanceOf(FormattingNode);\n expect(fragments[2]).toBeInstanceOf(TextNode);\n });\n it('should return FormattingNode with a TextNode as a child with correct text value', () => {\n const name = createInlineToolName('bold');\n const start = 5;\n const end = 8;\n const fragments = node.format(name, start, end);",
"score": 14.016795938416765
},
{
"filename": "src/entities/BlockNode/types/BlockNodeSerialized.ts",
"retrieved_chunk": " * The content of the Block\n */\n data: Record<string, unknown>; // @todo replace unknown type with serialized root node and value node\n /**\n * Serialized BlockTunes associated with the BlockNode\n */\n tunes: Record<string, BlockTuneSerialized>;\n}",
"score": 12.81043056071841
}
] | typescript | .map(() => new TextNode({} as TextNodeConstructorParameters)); |
import { createBlockTuneName } from './index';
import { createBlockTuneMock } from '../../utils/mocks/createBlockTuneMock';
describe('BlockTune', () => {
describe('.update()', () => {
it('should add field to data object by key if it doesn\'t exist', () => {
// Arrange
const blockTune = createBlockTuneMock({
data: {},
});
// Act
blockTune.update('align', 'left');
// Assert
expect(blockTune.serialized.data).toEqual({
align: 'left',
});
});
it('should update field in data object by key', () => {
// Arrange
const blockTune = createBlockTuneMock({
data: {
align: 'center',
},
});
// Act
blockTune.update('align', 'right');
// Assert
expect(blockTune.serialized.data).toEqual({
align: 'right',
});
});
});
describe('.serialized', () => {
it('should return serialized version of the BlockTune', () => {
// Arrange
const tune = createBlockTuneMock({
| name: createBlockTuneName('styling'),
data: { |
background: 'transparent',
},
});
// Act
const tuneSerialized = tune.serialized;
// Assert
expect(tuneSerialized).toEqual(
{
name: 'styling',
data: {
background: 'transparent',
},
}
);
});
});
});
| src/entities/BlockTune/BlockTune.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/utils/mocks/createBlockTuneMock.ts",
"retrieved_chunk": "import { BlockTune, BlockTuneName, createBlockTuneName } from '../../entities/BlockTune';\n/**\n * Creates a BlockTune instance with the given name and data.\n *\n * @param args - BlockTune constructor arguments.\n * @param args.name - The name of the tune.\n * @param args.data - Any additional data associated with the tune.\n */\nexport function createBlockTuneMock({ name, data }: {\n name?: BlockTuneName,",
"score": 21.58998922556795
},
{
"filename": "src/entities/BlockTune/types/BlockTuneSerialized.ts",
"retrieved_chunk": "/**\n * BlockTuneSerialized represents a serialized version of a BlockTune.\n */\nexport interface BlockTuneSerialized {\n /**\n * The name of the tune.\n * Serialized as a string.\n */\n name: string;\n /**",
"score": 20.369368288301736
},
{
"filename": "src/entities/BlockTune/index.ts",
"retrieved_chunk": " */\n public update(key: string, value: unknown): void {\n this.#data[key] = value;\n }\n /**\n * Returns serialized version of the BlockTune.\n */\n public get serialized(): BlockTuneSerialized {\n return {\n name: this.#name,",
"score": 20.271177414801727
},
{
"filename": "src/entities/BlockNode/BlockNode.spec.ts",
"retrieved_chunk": " createBlockTuneName('align'),\n createBlockTuneName('font-size'),\n createBlockTuneName('font-weight'),\n ];\n const blockTunes = blockTunesNames.reduce((acc, name) => ({\n ...acc,\n [name]: new BlockTune({} as BlockTuneConstructorParameters),\n }), {});\n const spyArray = Object\n .values(blockTunes)",
"score": 17.458370546796594
},
{
"filename": "src/utils/mocks/createBlockTuneMock.ts",
"retrieved_chunk": " data?: Record<string, unknown>,\n}): BlockTune {\n return new BlockTune(\n {\n name: name || createBlockTuneName('aligning'),\n data: data || {},\n }\n );\n}",
"score": 16.281937807922755
}
] | typescript | name: createBlockTuneName('styling'),
data: { |
import { EditorDocument } from './index';
import { BlockNode } from '../BlockNode';
import { createBlockNodeMock } from '../../utils/mocks/createBlockNodeMock';
import { createEditorDocumentMock } from '../../utils/mocks/createEditorDocumentMock';
/**
* Creates an EditorDocument object with some blocks for tests.
*/
function createEditorDocumentMockWithSomeBlocks(): EditorDocument {
const document = createEditorDocumentMock();
const countOfBlocks = 3;
for (let i = 0; i < countOfBlocks; i++) {
const block = createBlockNodeMock({
parent: document,
});
document.addBlock(block);
}
return document;
}
describe('EditorDocument', () => {
describe('.length', () => {
it('should return the number of blocks in the document', () => {
// Arrange
const blocksCount = 3;
const document = new EditorDocument({
children: [],
properties: {
readOnly: false,
},
});
for (let i = 0; i < blocksCount; i++) {
const block = createBlockNodeMock({
parent: document,
});
document.addBlock(block);
}
// Act
const actual = document.length;
// Assert
expect(actual).toBe(blocksCount);
});
});
describe('.addBlock()', () => {
it('should add the block to the end of the document if index is not provided', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
const block = createBlockNodeMock({
parent: document,
});
// Act
document.addBlock(block);
// Assert
const lastBlock = document. | getBlock(document.length - 1); |
expect(lastBlock).toBe(block);
});
it('should add the block to the beginning of the document if index is 0', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
const block = createBlockNodeMock({
parent: document,
});
// Act
document.addBlock(block, 0);
// Assert
expect(document.getBlock(0)).toBe(block);
});
it('should add the block to the specified index in the middle of the document', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
const block = createBlockNodeMock({
parent: document,
});
// Act
document.addBlock(block, 1);
// Assert
expect(document.getBlock(1)).toBe(block);
});
it('should add the block to the end of the document if the index after the last element is passed', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
const block = createBlockNodeMock({
parent: document,
});
// Act
document.addBlock(block, document.length);
// Assert
const lastBlock = document.getBlock(document.length - 1);
expect(lastBlock).toBe(block);
});
it('should throw an error if index is less then 0', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
const block = createBlockNodeMock({
parent: document,
});
// Act
const action = (): void => document.addBlock(block, -1);
// Assert
expect(action).toThrowError('Index out of bounds');
});
it('should throw an error if index is greater then document length', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
const block = createBlockNodeMock({
parent: document,
});
// Act
const action = (): void => document.addBlock(block, document.length + 1);
// Assert
expect(action).toThrowError('Index out of bounds');
});
});
describe('.removeBlock()', () => {
it('should remove the block from the beginning of the document if index 0 is passed', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
const block = document.getBlock(0);
// Act
document.removeBlock(0);
// Assert
expect(document.getBlock(0)).not.toBe(block);
});
it('should remove the block from the specified index in the middle of the document', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
const block = document.getBlock(1);
// Act
document.removeBlock(1);
// Assert
expect(document.getBlock(1)).not.toBe(block);
});
it('should remove the block from the end of the document if the last index is passed', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
const documentLengthBeforeRemove = document.length;
// Act
document.removeBlock(document.length - 1);
// Assert
expect(document.length).toBe(documentLengthBeforeRemove - 1);
});
it('should throw an error if index is less then 0', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
// Act
const action = (): void => document.removeBlock(-1);
// Assert
expect(action).toThrowError('Index out of bounds');
});
it('should throw an error if index is greater then document length', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
// Act
const action = (): void => document.removeBlock(document.length);
// Assert
expect(action).toThrowError('Index out of bounds');
});
});
describe('.getBlock()', () => {
it('should return the block from the specific index', () => {
// Arrange
const document = createEditorDocumentMock();
const countOfBlocks = 3;
const blocks: BlockNode[] = [];
for (let i = 0; i < countOfBlocks; i++) {
const block = createBlockNodeMock({
parent: document,
});
document.addBlock(block);
blocks.push(block);
}
const index = 1;
// Act
const block = document.getBlock(index);
// Assert
expect(block).toBe(blocks[index]);
});
it('should throw an error if index is less then 0', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
// Act
const action = (): BlockNode => document.getBlock(-1);
// Assert
expect(action).toThrowError('Index out of bounds');
});
it('should throw an error if index is greater then document length', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
// Act
const action = (): BlockNode => document.getBlock(document.length);
// Assert
expect(action).toThrowError('Index out of bounds');
});
});
});
| src/entities/EditorDocument/EditorDocument.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/EditorDocument/types/PropName.ts",
"retrieved_chunk": "/**\n * Enum with available props to customise document state and behaviour\n */\nexport enum PropName {\n /**\n * Read-only mode for the Editor\n */\n ReadOnly = 'readOnly',\n}",
"score": 20.99438448117006
},
{
"filename": "src/entities/EditorDocument/index.ts",
"retrieved_chunk": "import { BlockNode } from '../BlockNode';\nimport { EditorDocumentConstructorParameters, PropName } from './types';\n/**\n * EditorDocument class represents the top-level container for a tree-like structure of BlockNodes in an editor document.\n * It contains an array of BlockNodes representing the root-level nodes of the document.\n */\nexport class EditorDocument {\n /**\n * Private field representing the child BlockNodes of the EditorDocument\n */",
"score": 20.940676710001526
},
{
"filename": "src/entities/EditorDocument/types/EditorDocumentConstructorParameters.ts",
"retrieved_chunk": "import { BlockNode } from '../../BlockNode';\nimport { PropName } from './PropName';\nexport interface EditorDocumentConstructorParameters {\n /**\n * The child BlockNodes of the EditorDocument\n */\n children: BlockNode[];\n /**\n * The properties of the document\n */",
"score": 19.99783228352892
},
{
"filename": "src/entities/EditorDocument/index.ts",
"retrieved_chunk": " * @param args.properties - The properties of the document.\n */\n constructor({ children, properties }: EditorDocumentConstructorParameters) {\n this.#children = children;\n this.#properties = properties;\n }\n /**\n * Returns count of child BlockNodes of the EditorDocument.\n */\n public get length(): number {",
"score": 19.745330364120615
},
{
"filename": "src/entities/EditorDocument/index.ts",
"retrieved_chunk": " #children: BlockNode[];\n /**\n * Private field representing the properties of the document\n */\n #properties: Record<PropName, unknown>;\n /**\n * Constructor for EditorDocument class.\n *\n * @param args - EditorDocument constructor arguments.\n * @param args.children - The child BlockNodes of the EditorDocument.",
"score": 17.750918230419334
}
] | typescript | getBlock(document.length - 1); |
import { describe, it, expect, beforeEach, jest } from '@jest/globals';
import { ParentNode } from './ParentNode';
import type { ChildNode } from './ChildNode';
const createChildMock = (): ChildNode => {
return {
appendTo: jest.fn(),
remove: jest.fn(),
parent: null,
} as unknown as ChildNode;
};
interface Dummy extends ParentNode {
}
/**
*
*/
@ParentNode
class Dummy {
/**
*
* @param _options - dummy options
*/
// eslint-disable-next-line @typescript-eslint/no-empty-function,@typescript-eslint/no-unused-vars,no-unused-vars
constructor(_options?: unknown) {}
}
describe('ParentNode decorator', () => {
let dummy: Dummy;
beforeEach(() => {
dummy = new Dummy();
jest.resetAllMocks();
});
it('should add removeChild method to the decorated class', () => {
expect(dummy.removeChild).toBeInstanceOf(Function);
});
it('should add append method to the decorated class', () => {
expect(dummy.append).toBeInstanceOf(Function);
});
it('should add insertAfter method to the decorated class', () => {
| expect(dummy.insertAfter).toBeInstanceOf(Function); |
});
describe('constructor', () => {
it('should append passed children to new parent', () => {
const childMock = createChildMock();
dummy = new Dummy({
children: [ childMock ],
});
expect(childMock.appendTo).toBeCalledWith(dummy);
});
});
describe('.children', () => {
it('should return empty array by default', () => {
expect(dummy.children).toEqual([]);
});
it('should return children passed via constructor', () => {
const childMock = createChildMock();
dummy = new Dummy({
children: [ childMock ],
});
expect(dummy.children).toEqual([ childMock ]);
});
});
describe('.append()', () => {
it('should add child to the children array', () => {
const childMock = createChildMock();
dummy.append(childMock);
expect(dummy.children).toContain(childMock);
});
it('should add several children to the children array', () => {
const childMock = createChildMock();
const anotherChildMock = createChildMock();
dummy.append(childMock, anotherChildMock);
expect(dummy.children).toEqual([childMock, anotherChildMock]);
});
it('should move a child to the end of children array if it is already there', () => {
const childMock = createChildMock();
const anotherChildMock = createChildMock();
const oneMoreChildMock = createChildMock();
dummy = new Dummy({
children: [childMock, anotherChildMock, oneMoreChildMock],
});
dummy.append(anotherChildMock);
expect(dummy.children).toEqual([childMock, oneMoreChildMock, anotherChildMock]);
});
it('should preserve already existing children', () => {
const childMock = createChildMock();
const anotherChildMock = createChildMock();
const oneMoreChildMock = createChildMock();
dummy = new Dummy({
children: [childMock, anotherChildMock],
});
dummy.append(oneMoreChildMock);
expect(dummy.children).toEqual([childMock, anotherChildMock, oneMoreChildMock]);
});
});
describe('.insertAfter()', () => {
it('should insert a child after passed target', () => {
const childMock = createChildMock();
const anotherChildMock = createChildMock();
const childMockToInsert = createChildMock();
dummy = new Dummy({
children: [childMock, anotherChildMock],
});
dummy.insertAfter(childMock, childMockToInsert);
expect(dummy.children).toEqual([childMock, childMockToInsert, anotherChildMock]);
});
it('should insert several children after passed target', () => {
const childMock = createChildMock();
const anotherChildMock = createChildMock();
const childMockToInsert = createChildMock();
const anotherChildMockToInsert = createChildMock();
dummy = new Dummy({
children: [childMock, anotherChildMock],
});
dummy.insertAfter(childMock, childMockToInsert, anotherChildMockToInsert);
expect(dummy.children).toEqual([childMock, childMockToInsert, anotherChildMockToInsert, anotherChildMock]);
});
it('should remove existing child and insert it to the new place', () => {
const childMock = createChildMock();
const anotherChildMock = createChildMock();
const oneMoreChildMock = createChildMock();
const childMockToInsert = createChildMock();
dummy = new Dummy({
children: [childMock, anotherChildMock, oneMoreChildMock, childMockToInsert],
});
dummy.insertAfter(anotherChildMock, childMockToInsert);
expect(dummy.children).toEqual([childMock, anotherChildMock, childMockToInsert, oneMoreChildMock]);
});
});
describe('.removeChild()', () => {
it('should remove child from the children array', () => {
const childMock = createChildMock();
dummy = new Dummy({
children: [ childMock ],
});
dummy.removeChild(childMock);
expect(dummy.children).toHaveLength(0);
});
it('should call remove method of child', () => {
const childMock = createChildMock();
dummy = new Dummy({
children: [ childMock ],
});
dummy.removeChild(childMock);
expect(childMock.remove).toBeCalled();
});
});
});
| src/entities/interfaces/ParentNode.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/interfaces/ChildNode.spec.ts",
"retrieved_chunk": " expect(parentMock.append).toBeCalledWith(dummy);\n });\n it('should add remove method to the decorated class', () => {\n expect(dummy.remove).toBeInstanceOf(Function);\n });\n it('should add appendTo method to the decorated class', () => {\n expect(dummy.appendTo).toBeInstanceOf(Function);\n });\n describe('.parent', () => {\n it('should return null by default', () => {",
"score": 116.08412821353743
},
{
"filename": "src/entities/interfaces/ChildNode.spec.ts",
"retrieved_chunk": " dummy = new Dummy({\n parent: parentMock,\n });\n });\n it('should call parent\\'s removeChild method', () => {\n dummy.remove();\n expect(parentMock.removeChild).toBeCalledWith(dummy);\n });\n it('should set node\\'s parent to null', () => {\n dummy.remove();",
"score": 50.74283283728678
},
{
"filename": "src/entities/interfaces/ChildNode.spec.ts",
"retrieved_chunk": " expect(dummy.parent).toBeNull();\n });\n });\n describe('.appendTo()', () => {\n beforeEach(() => {\n dummy = new Dummy();\n });\n it('should call parent\\'s append method on appendTo call', () => {\n dummy.appendTo(parentMock);\n expect(parentMock.append).toBeCalledWith(dummy);",
"score": 45.35354676399068
},
{
"filename": "src/entities/interfaces/ChildNode.spec.ts",
"retrieved_chunk": " // eslint-disable-next-line @typescript-eslint/no-empty-function,@typescript-eslint/no-unused-vars,no-unused-vars\n constructor(_options?: unknown) {}\n}\ndescribe('ChildNode decorator', () => {\n let dummy: Dummy;\n beforeEach(() => {\n jest.resetAllMocks();\n });\n it('should decorated class to a parent', () => {\n dummy = new Dummy({ parent: parentMock });",
"score": 43.43651962139826
},
{
"filename": "src/entities/interfaces/ChildNode.spec.ts",
"retrieved_chunk": " });\n it('should set node\\'s parent on appendTo call', () => {\n dummy.appendTo(parentMock);\n expect(dummy.parent).toBe(parentMock);\n });\n it('should do nothing if parents are the same', () => {\n const dummyWithParent = new Dummy({\n parent: parentMock,\n });\n jest.resetAllMocks();",
"score": 29.343251814209232
}
] | typescript | expect(dummy.insertAfter).toBeInstanceOf(Function); |
import { EditorDocument } from './index';
import { BlockNode } from '../BlockNode';
import { createBlockNodeMock } from '../../utils/mocks/createBlockNodeMock';
import { createEditorDocumentMock } from '../../utils/mocks/createEditorDocumentMock';
/**
* Creates an EditorDocument object with some blocks for tests.
*/
function createEditorDocumentMockWithSomeBlocks(): EditorDocument {
const document = createEditorDocumentMock();
const countOfBlocks = 3;
for (let i = 0; i < countOfBlocks; i++) {
const block = createBlockNodeMock({
parent: document,
});
document.addBlock(block);
}
return document;
}
describe('EditorDocument', () => {
describe('.length', () => {
it('should return the number of blocks in the document', () => {
// Arrange
const blocksCount = 3;
const document = new EditorDocument({
children: [],
properties: {
readOnly: false,
},
});
for (let i = 0; i < blocksCount; i++) {
const block = createBlockNodeMock({
parent: document,
});
document.addBlock(block);
}
// Act
const actual = document.length;
// Assert
expect(actual).toBe(blocksCount);
});
});
describe('.addBlock()', () => {
it('should add the block to the end of the document if index is not provided', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
const block = createBlockNodeMock({
parent: document,
});
// Act
document.addBlock(block);
// Assert
const lastBlock = document.getBlock(document.length - 1);
expect(lastBlock).toBe(block);
});
it('should add the block to the beginning of the document if index is 0', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
const block = createBlockNodeMock({
parent: document,
});
// Act
document.addBlock(block, 0);
// Assert
expect(document.getBlock(0)).toBe(block);
});
it('should add the block to the specified index in the middle of the document', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
const block = createBlockNodeMock({
parent: document,
});
// Act
document.addBlock(block, 1);
// Assert
expect(document.getBlock(1)).toBe(block);
});
it('should add the block to the end of the document if the index after the last element is passed', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
const block = createBlockNodeMock({
parent: document,
});
// Act
document.addBlock(block, document.length);
// Assert
const lastBlock = document.getBlock(document.length - 1);
expect(lastBlock).toBe(block);
});
it('should throw an error if index is less then 0', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
const block = createBlockNodeMock({
parent: document,
});
// Act
const action = (): void => document.addBlock(block, -1);
// Assert
expect(action).toThrowError('Index out of bounds');
});
it('should throw an error if index is greater then document length', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
const block = createBlockNodeMock({
parent: document,
});
// Act
const action = (): void => document.addBlock(block, document.length + 1);
// Assert
expect(action).toThrowError('Index out of bounds');
});
});
describe('.removeBlock()', () => {
it('should remove the block from the beginning of the document if index 0 is passed', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
const block = document.getBlock(0);
// Act
| document.removeBlock(0); |
// Assert
expect(document.getBlock(0)).not.toBe(block);
});
it('should remove the block from the specified index in the middle of the document', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
const block = document.getBlock(1);
// Act
document.removeBlock(1);
// Assert
expect(document.getBlock(1)).not.toBe(block);
});
it('should remove the block from the end of the document if the last index is passed', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
const documentLengthBeforeRemove = document.length;
// Act
document.removeBlock(document.length - 1);
// Assert
expect(document.length).toBe(documentLengthBeforeRemove - 1);
});
it('should throw an error if index is less then 0', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
// Act
const action = (): void => document.removeBlock(-1);
// Assert
expect(action).toThrowError('Index out of bounds');
});
it('should throw an error if index is greater then document length', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
// Act
const action = (): void => document.removeBlock(document.length);
// Assert
expect(action).toThrowError('Index out of bounds');
});
});
describe('.getBlock()', () => {
it('should return the block from the specific index', () => {
// Arrange
const document = createEditorDocumentMock();
const countOfBlocks = 3;
const blocks: BlockNode[] = [];
for (let i = 0; i < countOfBlocks; i++) {
const block = createBlockNodeMock({
parent: document,
});
document.addBlock(block);
blocks.push(block);
}
const index = 1;
// Act
const block = document.getBlock(index);
// Assert
expect(block).toBe(blocks[index]);
});
it('should throw an error if index is less then 0', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
// Act
const action = (): BlockNode => document.getBlock(-1);
// Assert
expect(action).toThrowError('Index out of bounds');
});
it('should throw an error if index is greater then document length', () => {
// Arrange
const document = createEditorDocumentMockWithSomeBlocks();
// Act
const action = (): BlockNode => document.getBlock(document.length);
// Assert
expect(action).toThrowError('Index out of bounds');
});
});
});
| src/entities/EditorDocument/EditorDocument.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/EditorDocument/index.ts",
"retrieved_chunk": " * Checks if the index is out of bounds.\n *\n * @param index - The index to check\n * @param max - The maximum index value. Defaults to the length of the children array.\n * @throws Error if the index is out of bounds\n */\n #checkIndexOutOfBounds(index: number, max: number = this.length): void {\n if (index < 0 || index > max) {\n throw new Error('Index out of bounds');\n }",
"score": 26.089178765752195
},
{
"filename": "src/entities/EditorDocument/index.ts",
"retrieved_chunk": " *\n * @param index - The index of the BlockNode to remove\n * @throws Error if the index is out of bounds\n */\n public removeBlock(index: number): void {\n this.#checkIndexOutOfBounds(index, this.length - 1);\n this.#children.splice(index, 1);\n }\n /**\n * Returns the BlockNode at the specified index.",
"score": 25.006481315961604
},
{
"filename": "src/entities/EditorDocument/index.ts",
"retrieved_chunk": " * Throws an error if the index is out of bounds.\n *\n * @param index - The index of the BlockNode to return\n * @throws Error if the index is out of bounds\n */\n public getBlock(index: number): BlockNode {\n this.#checkIndexOutOfBounds(index, this.length - 1);\n return this.#children[index];\n }\n /**",
"score": 23.128100612937477
},
{
"filename": "src/entities/EditorDocument/index.ts",
"retrieved_chunk": "import { BlockNode } from '../BlockNode';\nimport { EditorDocumentConstructorParameters, PropName } from './types';\n/**\n * EditorDocument class represents the top-level container for a tree-like structure of BlockNodes in an editor document.\n * It contains an array of BlockNodes representing the root-level nodes of the document.\n */\nexport class EditorDocument {\n /**\n * Private field representing the child BlockNodes of the EditorDocument\n */",
"score": 20.753297152640076
},
{
"filename": "src/entities/EditorDocument/types/EditorDocumentConstructorParameters.ts",
"retrieved_chunk": "import { BlockNode } from '../../BlockNode';\nimport { PropName } from './PropName';\nexport interface EditorDocumentConstructorParameters {\n /**\n * The child BlockNodes of the EditorDocument\n */\n children: BlockNode[];\n /**\n * The properties of the document\n */",
"score": 20.209709354822714
}
] | typescript | document.removeBlock(0); |
import { beforeEach, describe, expect, it } from '@jest/globals';
import { ParentNode } from '../interfaces';
import { createInlineToolData, createInlineToolName, FormattingNode } from './index';
import { TextNode } from '../TextNode';
const parentMock = {
insertAfter: jest.fn(),
removeChild: jest.fn(),
append: jest.fn(),
children: [],
} as unknown as ParentNode;
const createChildMock = (value: string): TextNode => ({
getText: jest.fn(() => value),
appendTo: jest.fn(),
insertText: jest.fn(),
removeText: jest.fn(),
split: jest.fn(() => null),
format: jest.fn(() => [ new FormattingNode({ tool: createInlineToolName('tool') }) ]),
length: value.length,
} as unknown as TextNode);
describe('FormattingNode', () => {
const childMock = createChildMock('Some text here. ');
const anotherChildMock = createChildMock('Another text here.');
const tool = createInlineToolName('bold');
const anotherTool = createInlineToolName('italic');
const data = createInlineToolData({});
let node: FormattingNode;
beforeEach(() => {
node = new FormattingNode({
tool,
data,
parent: parentMock as FormattingNode,
children: [childMock, anotherChildMock],
});
jest.clearAllMocks();
});
describe('.length', () => {
it('should return sum of lengths of children', () => {
| expect(node.length).toEqual(childMock.length + anotherChildMock.length); |
});
});
describe('.serialized', () => {
it('should return concatenated text of all fragments with fragments list describing formatting', () => {
const result = node.serialized;
expect(result).toEqual({
text: childMock.getText() + anotherChildMock.getText(),
fragments: [
{
tool,
data,
range: [0, node.length],
},
],
});
});
});
describe('.insertText()', () => {
const newText = 'new text';
const index = 3;
it('should lead calling insertText() of the child with the passed index', () => {
node.insertText(newText, index);
expect(childMock.insertText).toBeCalledWith(newText, index);
});
it('should adjust index by child offset', () => {
const offset = childMock.length;
node.insertText(newText, index + offset);
expect(anotherChildMock.insertText).toBeCalledWith(newText, index);
});
it('should append text to the last child by default', () => {
node.insertText(newText);
expect(anotherChildMock.insertText).toBeCalledWith(newText, anotherChildMock.length);
});
});
describe('.removeText()', () => {
const start = 3;
const end = 5;
it('should remove text from appropriate child', () => {
node.removeText(start, end);
expect(childMock.removeText).toBeCalledWith(start, end);
});
it('should adjust indices by child offset', () => {
const offset = childMock.length;
node.removeText(offset + start, offset + end);
expect(anotherChildMock.removeText).toBeCalledWith(start, end);
});
it('should call removeText for each affected child', () => {
const offset = childMock.length;
node.removeText(start, offset + end);
expect(childMock.removeText).toBeCalledWith(start, offset);
expect(anotherChildMock.removeText).toBeCalledWith(0, end);
});
it('should remove all text by default', () => {
node.removeText();
expect(childMock.removeText).toBeCalledWith(0, childMock.length);
expect(anotherChildMock.removeText).toBeCalledWith(0, anotherChildMock.length);
});
it('should call remove() if length is 0 after removeText() call', () => {
const removeSpy = jest.spyOn(node, 'remove');
const lengthSpy = jest.spyOn(node, 'length', 'get').mockImplementation(() => 0);
node.removeText();
expect(removeSpy).toBeCalled();
removeSpy.mockRestore();
lengthSpy.mockRestore();
});
});
describe('.getText()', () => {
const start = 3;
const end = 5;
it('should call getText() for the relevant child', () => {
node.getText(start, end);
expect(childMock.getText).toBeCalledWith(start, end);
});
it('should adjust index by child offset', () => {
const offset = childMock.length;
node.getText(offset + start, offset + end);
expect(anotherChildMock.getText).toBeCalledWith(start, end);
});
it('should call getText for all relevant children', () => {
const offset = childMock.length;
node.getText(start, offset + end);
expect(childMock.getText).toBeCalledWith(start, offset);
expect(anotherChildMock.getText).toBeCalledWith(0, end);
});
it('should return all text by default', () => {
node.getText();
expect(childMock.getText).toBeCalledWith(0, childMock.length);
expect(anotherChildMock.getText).toBeCalledWith(0, anotherChildMock.length);
});
});
describe('.getFragments()', () => {
/**
* @todo
*/
it.todo('should return fragments for sub-tree');
it('should return node\'s fragment', () => {
const fragments = node.getFragments();
expect(fragments).toEqual([
{
tool,
data,
range: [0, node.length],
},
]);
});
});
describe('.split()', () => {
const index = 5;
it('should not split (return null) if index is 0', () => {
const newNode = node.split(0);
expect(newNode).toBeNull();
});
it('should not split (return null) if index equals text length', () => {
const newNode = node.split(node.length);
expect(newNode).toBeNull();
});
it('should create new FormattingNode on split', () => {
const newNode = node.split(index);
expect(newNode).toBeInstanceOf(FormattingNode);
});
/**
* @todo check this and related cases with integration tests
*/
it('should create new FormattingNode with children split from the original one', () => {
const newNode = node.split(childMock.length);
expect(newNode?.children).toEqual([ anotherChildMock ]);
});
it('should call split method of child containing the specified index', () => {
node.split(index);
expect(childMock.split).toBeCalledWith(index);
});
it('should insert new node to the parent', () => {
const newNode = node.split(index);
expect(parentMock.insertAfter).toBeCalledWith(node, newNode);
});
});
describe('.format()', () => {
const start = 3;
const end = 5;
it('should apply formatting to the relevant child', () => {
node.format(anotherTool, start, end);
expect(childMock.format).toBeCalledWith(anotherTool, start, end, undefined);
});
it('should adjust index by child offset', () => {
const offset = childMock.length;
node.format(anotherTool, offset + start, offset + end);
expect(anotherChildMock.format).toBeCalledWith(anotherTool, start, end, undefined);
});
it('should format all relevant children', () => {
const offset = childMock.length;
node.format(anotherTool, start, offset + end);
expect(childMock.format).toBeCalledWith(anotherTool, start, offset, undefined);
expect(anotherChildMock.format).toBeCalledWith(anotherTool, 0, end, undefined);
});
it('should do nothing if same tool is being applied', () => {
node.format(tool, start, end);
expect(childMock.format).not.toBeCalled();
expect(anotherChildMock.format).not.toBeCalled();
});
it('should return empty array if same tool is being applied', () => {
const result = node.format(tool, start, end);
expect(result).toHaveLength(0);
});
it('should return array of new formatting nodes', () => {
const result = node.format(anotherTool, start, end);
expect(result).toEqual(childMock.format(anotherTool, start, end));
});
});
});
| src/entities/FormattingNode/FormattingNode.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/interfaces/ParentNode.spec.ts",
"retrieved_chunk": " dummy = new Dummy({\n children: [childMock, anotherChildMock, oneMoreChildMock, childMockToInsert],\n });\n dummy.insertAfter(anotherChildMock, childMockToInsert);\n expect(dummy.children).toEqual([childMock, anotherChildMock, childMockToInsert, oneMoreChildMock]);\n });\n });\n describe('.removeChild()', () => {\n it('should remove child from the children array', () => {\n const childMock = createChildMock();",
"score": 31.80896296207624
},
{
"filename": "src/entities/interfaces/ParentNode.spec.ts",
"retrieved_chunk": " expect(dummy.children).toEqual([childMock, anotherChildMock, oneMoreChildMock]);\n });\n });\n describe('.insertAfter()', () => {\n it('should insert a child after passed target', () => {\n const childMock = createChildMock();\n const anotherChildMock = createChildMock();\n const childMockToInsert = createChildMock();\n dummy = new Dummy({\n children: [childMock, anotherChildMock],",
"score": 31.417333371785737
},
{
"filename": "src/entities/interfaces/ParentNode.spec.ts",
"retrieved_chunk": " expect(dummy.children).toEqual([childMock, oneMoreChildMock, anotherChildMock]);\n });\n it('should preserve already existing children', () => {\n const childMock = createChildMock();\n const anotherChildMock = createChildMock();\n const oneMoreChildMock = createChildMock();\n dummy = new Dummy({\n children: [childMock, anotherChildMock],\n });\n dummy.append(oneMoreChildMock);",
"score": 30.642865442535967
},
{
"filename": "src/entities/interfaces/ParentNode.spec.ts",
"retrieved_chunk": " expect(dummy.children).toEqual([childMock, anotherChildMock]);\n });\n it('should move a child to the end of children array if it is already there', () => {\n const childMock = createChildMock();\n const anotherChildMock = createChildMock();\n const oneMoreChildMock = createChildMock();\n dummy = new Dummy({\n children: [childMock, anotherChildMock, oneMoreChildMock],\n });\n dummy.append(anotherChildMock);",
"score": 30.34143391527249
},
{
"filename": "src/entities/interfaces/ParentNode.spec.ts",
"retrieved_chunk": " describe('.append()', () => {\n it('should add child to the children array', () => {\n const childMock = createChildMock();\n dummy.append(childMock);\n expect(dummy.children).toContain(childMock);\n });\n it('should add several children to the children array', () => {\n const childMock = createChildMock();\n const anotherChildMock = createChildMock();\n dummy.append(childMock, anotherChildMock);",
"score": 29.23596726516075
}
] | typescript | expect(node.length).toEqual(childMock.length + anotherChildMock.length); |
import { beforeEach, describe, expect, it } from '@jest/globals';
import { ParentNode } from '../interfaces';
import { createInlineToolData, createInlineToolName, FormattingNode } from './index';
import { TextNode } from '../TextNode';
const parentMock = {
insertAfter: jest.fn(),
removeChild: jest.fn(),
append: jest.fn(),
children: [],
} as unknown as ParentNode;
const createChildMock = (value: string): TextNode => ({
getText: jest.fn(() => value),
appendTo: jest.fn(),
insertText: jest.fn(),
removeText: jest.fn(),
split: jest.fn(() => null),
format: jest.fn(() => [ new FormattingNode({ tool: createInlineToolName('tool') }) ]),
length: value.length,
} as unknown as TextNode);
describe('FormattingNode', () => {
const childMock = createChildMock('Some text here. ');
const anotherChildMock = createChildMock('Another text here.');
const tool = createInlineToolName('bold');
const anotherTool = createInlineToolName('italic');
const data = createInlineToolData({});
let node: FormattingNode;
beforeEach(() => {
node = new FormattingNode({
tool,
data,
parent: parentMock as FormattingNode,
children: [childMock, anotherChildMock],
});
jest.clearAllMocks();
});
describe('.length', () => {
it('should return sum of lengths of children', () => {
expect(node.length).toEqual(childMock.length + anotherChildMock.length);
});
});
describe('.serialized', () => {
it('should return concatenated text of all fragments with fragments list describing formatting', () => {
const result = node.serialized;
expect(result).toEqual({
text: childMock.getText() + anotherChildMock.getText(),
fragments: [
{
tool,
data,
range: [0, node.length],
},
],
});
});
});
describe('.insertText()', () => {
const newText = 'new text';
const index = 3;
it('should lead calling insertText() of the child with the passed index', () => {
node.insertText(newText, index);
expect(childMock | .insertText).toBeCalledWith(newText, index); |
});
it('should adjust index by child offset', () => {
const offset = childMock.length;
node.insertText(newText, index + offset);
expect(anotherChildMock.insertText).toBeCalledWith(newText, index);
});
it('should append text to the last child by default', () => {
node.insertText(newText);
expect(anotherChildMock.insertText).toBeCalledWith(newText, anotherChildMock.length);
});
});
describe('.removeText()', () => {
const start = 3;
const end = 5;
it('should remove text from appropriate child', () => {
node.removeText(start, end);
expect(childMock.removeText).toBeCalledWith(start, end);
});
it('should adjust indices by child offset', () => {
const offset = childMock.length;
node.removeText(offset + start, offset + end);
expect(anotherChildMock.removeText).toBeCalledWith(start, end);
});
it('should call removeText for each affected child', () => {
const offset = childMock.length;
node.removeText(start, offset + end);
expect(childMock.removeText).toBeCalledWith(start, offset);
expect(anotherChildMock.removeText).toBeCalledWith(0, end);
});
it('should remove all text by default', () => {
node.removeText();
expect(childMock.removeText).toBeCalledWith(0, childMock.length);
expect(anotherChildMock.removeText).toBeCalledWith(0, anotherChildMock.length);
});
it('should call remove() if length is 0 after removeText() call', () => {
const removeSpy = jest.spyOn(node, 'remove');
const lengthSpy = jest.spyOn(node, 'length', 'get').mockImplementation(() => 0);
node.removeText();
expect(removeSpy).toBeCalled();
removeSpy.mockRestore();
lengthSpy.mockRestore();
});
});
describe('.getText()', () => {
const start = 3;
const end = 5;
it('should call getText() for the relevant child', () => {
node.getText(start, end);
expect(childMock.getText).toBeCalledWith(start, end);
});
it('should adjust index by child offset', () => {
const offset = childMock.length;
node.getText(offset + start, offset + end);
expect(anotherChildMock.getText).toBeCalledWith(start, end);
});
it('should call getText for all relevant children', () => {
const offset = childMock.length;
node.getText(start, offset + end);
expect(childMock.getText).toBeCalledWith(start, offset);
expect(anotherChildMock.getText).toBeCalledWith(0, end);
});
it('should return all text by default', () => {
node.getText();
expect(childMock.getText).toBeCalledWith(0, childMock.length);
expect(anotherChildMock.getText).toBeCalledWith(0, anotherChildMock.length);
});
});
describe('.getFragments()', () => {
/**
* @todo
*/
it.todo('should return fragments for sub-tree');
it('should return node\'s fragment', () => {
const fragments = node.getFragments();
expect(fragments).toEqual([
{
tool,
data,
range: [0, node.length],
},
]);
});
});
describe('.split()', () => {
const index = 5;
it('should not split (return null) if index is 0', () => {
const newNode = node.split(0);
expect(newNode).toBeNull();
});
it('should not split (return null) if index equals text length', () => {
const newNode = node.split(node.length);
expect(newNode).toBeNull();
});
it('should create new FormattingNode on split', () => {
const newNode = node.split(index);
expect(newNode).toBeInstanceOf(FormattingNode);
});
/**
* @todo check this and related cases with integration tests
*/
it('should create new FormattingNode with children split from the original one', () => {
const newNode = node.split(childMock.length);
expect(newNode?.children).toEqual([ anotherChildMock ]);
});
it('should call split method of child containing the specified index', () => {
node.split(index);
expect(childMock.split).toBeCalledWith(index);
});
it('should insert new node to the parent', () => {
const newNode = node.split(index);
expect(parentMock.insertAfter).toBeCalledWith(node, newNode);
});
});
describe('.format()', () => {
const start = 3;
const end = 5;
it('should apply formatting to the relevant child', () => {
node.format(anotherTool, start, end);
expect(childMock.format).toBeCalledWith(anotherTool, start, end, undefined);
});
it('should adjust index by child offset', () => {
const offset = childMock.length;
node.format(anotherTool, offset + start, offset + end);
expect(anotherChildMock.format).toBeCalledWith(anotherTool, start, end, undefined);
});
it('should format all relevant children', () => {
const offset = childMock.length;
node.format(anotherTool, start, offset + end);
expect(childMock.format).toBeCalledWith(anotherTool, start, offset, undefined);
expect(anotherChildMock.format).toBeCalledWith(anotherTool, 0, end, undefined);
});
it('should do nothing if same tool is being applied', () => {
node.format(tool, start, end);
expect(childMock.format).not.toBeCalled();
expect(anotherChildMock.format).not.toBeCalled();
});
it('should return empty array if same tool is being applied', () => {
const result = node.format(tool, start, end);
expect(result).toHaveLength(0);
});
it('should return array of new formatting nodes', () => {
const result = node.format(anotherTool, start, end);
expect(result).toEqual(childMock.format(anotherTool, start, end));
});
});
});
| src/entities/FormattingNode/FormattingNode.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/TextNode/TextNode.spec.ts",
"retrieved_chunk": " node.insertText(text, index);\n expect(node.getText()).toEqual(initialText.slice(0, index) + text + initialText.slice(index));\n });\n it('should throw an error if index is less than 0', () => {\n const f = (): void => node.insertText(text, -1);\n expect(f).toThrowError();\n });\n it('should throw an error if index is greater than node length', () => {\n const f = (): void => node.insertText(text, initialText.length + 1);\n expect(f).toThrowError();",
"score": 32.565487931885414
},
{
"filename": "src/entities/TextNode/TextNode.spec.ts",
"retrieved_chunk": " it('should append text if not empty', () => {\n node.insertText(text);\n expect(node.getText()).toEqual(initialText + text);\n });\n it('should prepend text if index is 0 and node is not empty', () => {\n node.insertText(text, 0);\n expect(node.getText()).toEqual(text + initialText);\n });\n it('should insert text at index if not empty', () => {\n const index = 5;",
"score": 30.56536061626395
},
{
"filename": "src/entities/TextNode/TextNode.spec.ts",
"retrieved_chunk": " it('should have empty value by default', () => {\n node = new TextNode();\n expect(node.getText()).toEqual('');\n });\n describe('.insertText()', () => {\n it('should set text to value if node is empty', () => {\n node = new TextNode();\n node.insertText(text);\n expect(node.getText()).toEqual(text);\n });",
"score": 29.655781017073924
},
{
"filename": "src/entities/FormattingNode/index.ts",
"retrieved_chunk": " }\n /**\n * Inserts text to the specified index, by default appends text to the end of the current value\n *\n * @param text - text to insert\n * @param [index] - char index where to insert text\n */\n public insertText(text: string, index = this.length): void {\n const [child, offset] = this.#findChildByIndex(index);\n child?.insertText(text, index - offset);",
"score": 29.55873160267801
},
{
"filename": "src/entities/TextNode/index.ts",
"retrieved_chunk": " const newNode = new TextNode();\n const text = this.removeText(index);\n newNode.insertText(text);\n this.parent?.insertAfter(this, newNode);\n return newNode;\n }\n /**\n * Validates index\n *\n * @param index - char index to validate",
"score": 26.18288263138676
}
] | typescript | .insertText).toBeCalledWith(newText, index); |
import { beforeEach, describe, expect, it } from '@jest/globals';
import { ParentNode } from '../interfaces';
import { createInlineToolData, createInlineToolName, FormattingNode } from './index';
import { TextNode } from '../TextNode';
const parentMock = {
insertAfter: jest.fn(),
removeChild: jest.fn(),
append: jest.fn(),
children: [],
} as unknown as ParentNode;
const createChildMock = (value: string): TextNode => ({
getText: jest.fn(() => value),
appendTo: jest.fn(),
insertText: jest.fn(),
removeText: jest.fn(),
split: jest.fn(() => null),
format: jest.fn(() => [ new FormattingNode({ tool: createInlineToolName('tool') }) ]),
length: value.length,
} as unknown as TextNode);
describe('FormattingNode', () => {
const childMock = createChildMock('Some text here. ');
const anotherChildMock = createChildMock('Another text here.');
const tool = createInlineToolName('bold');
const anotherTool = createInlineToolName('italic');
const data = createInlineToolData({});
let node: FormattingNode;
beforeEach(() => {
node = new FormattingNode({
tool,
data,
parent: parentMock as FormattingNode,
children: [childMock, anotherChildMock],
});
jest.clearAllMocks();
});
describe('.length', () => {
it('should return sum of lengths of children', () => {
expect(node.length).toEqual(childMock.length + anotherChildMock.length);
});
});
describe('.serialized', () => {
it('should return concatenated text of all fragments with fragments list describing formatting', () => {
const result = node.serialized;
expect(result).toEqual({
text: childMock | .getText() + anotherChildMock.getText(),
fragments: [
{ |
tool,
data,
range: [0, node.length],
},
],
});
});
});
describe('.insertText()', () => {
const newText = 'new text';
const index = 3;
it('should lead calling insertText() of the child with the passed index', () => {
node.insertText(newText, index);
expect(childMock.insertText).toBeCalledWith(newText, index);
});
it('should adjust index by child offset', () => {
const offset = childMock.length;
node.insertText(newText, index + offset);
expect(anotherChildMock.insertText).toBeCalledWith(newText, index);
});
it('should append text to the last child by default', () => {
node.insertText(newText);
expect(anotherChildMock.insertText).toBeCalledWith(newText, anotherChildMock.length);
});
});
describe('.removeText()', () => {
const start = 3;
const end = 5;
it('should remove text from appropriate child', () => {
node.removeText(start, end);
expect(childMock.removeText).toBeCalledWith(start, end);
});
it('should adjust indices by child offset', () => {
const offset = childMock.length;
node.removeText(offset + start, offset + end);
expect(anotherChildMock.removeText).toBeCalledWith(start, end);
});
it('should call removeText for each affected child', () => {
const offset = childMock.length;
node.removeText(start, offset + end);
expect(childMock.removeText).toBeCalledWith(start, offset);
expect(anotherChildMock.removeText).toBeCalledWith(0, end);
});
it('should remove all text by default', () => {
node.removeText();
expect(childMock.removeText).toBeCalledWith(0, childMock.length);
expect(anotherChildMock.removeText).toBeCalledWith(0, anotherChildMock.length);
});
it('should call remove() if length is 0 after removeText() call', () => {
const removeSpy = jest.spyOn(node, 'remove');
const lengthSpy = jest.spyOn(node, 'length', 'get').mockImplementation(() => 0);
node.removeText();
expect(removeSpy).toBeCalled();
removeSpy.mockRestore();
lengthSpy.mockRestore();
});
});
describe('.getText()', () => {
const start = 3;
const end = 5;
it('should call getText() for the relevant child', () => {
node.getText(start, end);
expect(childMock.getText).toBeCalledWith(start, end);
});
it('should adjust index by child offset', () => {
const offset = childMock.length;
node.getText(offset + start, offset + end);
expect(anotherChildMock.getText).toBeCalledWith(start, end);
});
it('should call getText for all relevant children', () => {
const offset = childMock.length;
node.getText(start, offset + end);
expect(childMock.getText).toBeCalledWith(start, offset);
expect(anotherChildMock.getText).toBeCalledWith(0, end);
});
it('should return all text by default', () => {
node.getText();
expect(childMock.getText).toBeCalledWith(0, childMock.length);
expect(anotherChildMock.getText).toBeCalledWith(0, anotherChildMock.length);
});
});
describe('.getFragments()', () => {
/**
* @todo
*/
it.todo('should return fragments for sub-tree');
it('should return node\'s fragment', () => {
const fragments = node.getFragments();
expect(fragments).toEqual([
{
tool,
data,
range: [0, node.length],
},
]);
});
});
describe('.split()', () => {
const index = 5;
it('should not split (return null) if index is 0', () => {
const newNode = node.split(0);
expect(newNode).toBeNull();
});
it('should not split (return null) if index equals text length', () => {
const newNode = node.split(node.length);
expect(newNode).toBeNull();
});
it('should create new FormattingNode on split', () => {
const newNode = node.split(index);
expect(newNode).toBeInstanceOf(FormattingNode);
});
/**
* @todo check this and related cases with integration tests
*/
it('should create new FormattingNode with children split from the original one', () => {
const newNode = node.split(childMock.length);
expect(newNode?.children).toEqual([ anotherChildMock ]);
});
it('should call split method of child containing the specified index', () => {
node.split(index);
expect(childMock.split).toBeCalledWith(index);
});
it('should insert new node to the parent', () => {
const newNode = node.split(index);
expect(parentMock.insertAfter).toBeCalledWith(node, newNode);
});
});
describe('.format()', () => {
const start = 3;
const end = 5;
it('should apply formatting to the relevant child', () => {
node.format(anotherTool, start, end);
expect(childMock.format).toBeCalledWith(anotherTool, start, end, undefined);
});
it('should adjust index by child offset', () => {
const offset = childMock.length;
node.format(anotherTool, offset + start, offset + end);
expect(anotherChildMock.format).toBeCalledWith(anotherTool, start, end, undefined);
});
it('should format all relevant children', () => {
const offset = childMock.length;
node.format(anotherTool, start, offset + end);
expect(childMock.format).toBeCalledWith(anotherTool, start, offset, undefined);
expect(anotherChildMock.format).toBeCalledWith(anotherTool, 0, end, undefined);
});
it('should do nothing if same tool is being applied', () => {
node.format(tool, start, end);
expect(childMock.format).not.toBeCalled();
expect(anotherChildMock.format).not.toBeCalled();
});
it('should return empty array if same tool is being applied', () => {
const result = node.format(tool, start, end);
expect(result).toHaveLength(0);
});
it('should return array of new formatting nodes', () => {
const result = node.format(anotherTool, start, end);
expect(result).toEqual(childMock.format(anotherTool, start, end));
});
});
});
| src/entities/FormattingNode/FormattingNode.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/TextNode/TextNode.spec.ts",
"retrieved_chunk": " const result = node.serialized;\n expect(result).toEqual({\n text: initialText,\n fragments: [],\n });\n });\n });\n});",
"score": 41.55480099526379
},
{
"filename": "src/entities/TextNode/index.ts",
"retrieved_chunk": " * Returns serialized value of the node\n */\n public get serialized(): InlineNodeSerialized {\n return {\n text: this.getText(),\n // No fragments for text node\n fragments: [],\n };\n }\n /**",
"score": 33.52231249764724
},
{
"filename": "src/entities/FormattingNode/index.ts",
"retrieved_chunk": " return this.children.reduce((sum, child) => sum + child.length, 0);\n }\n /**\n * Returns serialized value of the node: text and formatting fragments\n */\n public get serialized(): InlineNodeSerialized {\n return {\n text: this.getText(),\n fragments: this.getFragments(),\n };",
"score": 32.196876424181355
},
{
"filename": "src/entities/TextNode/TextNode.spec.ts",
"retrieved_chunk": " node.split(index);\n expect(node.getText()).toEqual(initialText.slice(0, index));\n });\n it('should insert new node to the parent', () => {\n const newNode = node.split(index);\n expect(parentMock.insertAfter).toBeCalledWith(node, newNode);\n });\n });\n describe('.serialized', () => {\n it('should return text value and empty array of fragments', () => {",
"score": 27.345650313202988
},
{
"filename": "src/entities/TextNode/TextNode.spec.ts",
"retrieved_chunk": " const end = 5;\n expect(() => node.getText(start, end)).not.toThrowError();\n });\n });\n describe('.removeText()', () => {\n it('should remove all text by default', () => {\n node.removeText();\n expect(node.getText()).toEqual('');\n });\n it('should remove text from specified index', () => {",
"score": 24.916016350137905
}
] | typescript | .getText() + anotherChildMock.getText(),
fragments: [
{ |
import { beforeEach, describe, expect, it } from '@jest/globals';
import { ParentNode } from '../interfaces';
import { createInlineToolData, createInlineToolName, FormattingNode } from './index';
import { TextNode } from '../TextNode';
const parentMock = {
insertAfter: jest.fn(),
removeChild: jest.fn(),
append: jest.fn(),
children: [],
} as unknown as ParentNode;
const createChildMock = (value: string): TextNode => ({
getText: jest.fn(() => value),
appendTo: jest.fn(),
insertText: jest.fn(),
removeText: jest.fn(),
split: jest.fn(() => null),
format: jest.fn(() => [ new FormattingNode({ tool: createInlineToolName('tool') }) ]),
length: value.length,
} as unknown as TextNode);
describe('FormattingNode', () => {
const childMock = createChildMock('Some text here. ');
const anotherChildMock = createChildMock('Another text here.');
const tool = createInlineToolName('bold');
const anotherTool = createInlineToolName('italic');
const data = createInlineToolData({});
let node: FormattingNode;
beforeEach(() => {
node = new FormattingNode({
tool,
data,
parent: parentMock as FormattingNode,
children: [childMock, anotherChildMock],
});
jest.clearAllMocks();
});
describe('.length', () => {
it('should return sum of lengths of children', () => {
expect(node.length).toEqual(childMock.length + anotherChildMock.length);
});
});
describe('.serialized', () => {
it('should return concatenated text of all fragments with fragments list describing formatting', () => {
const result = node.serialized;
expect(result).toEqual({
text: childMock.getText() + anotherChildMock.getText(),
fragments: [
{
tool,
data,
range: [0, node.length],
},
],
});
});
});
describe('.insertText()', () => {
const newText = 'new text';
const index = 3;
it('should lead calling insertText() of the child with the passed index', () => {
node.insertText(newText, index);
expect(childMock.insertText).toBeCalledWith(newText, index);
});
it('should adjust index by child offset', () => {
const offset = childMock.length;
node.insertText(newText, index + offset);
expect(anotherChildMock.insertText).toBeCalledWith(newText, index);
});
it('should append text to the last child by default', () => {
node.insertText(newText);
expect(anotherChildMock.insertText).toBeCalledWith(newText, anotherChildMock.length);
});
});
describe('.removeText()', () => {
const start = 3;
const end = 5;
it('should remove text from appropriate child', () => {
node.removeText(start, end);
expect(childMock.removeText).toBeCalledWith(start, end);
});
it('should adjust indices by child offset', () => {
const offset = childMock.length;
node.removeText(offset + start, offset + end);
expect(anotherChildMock.removeText).toBeCalledWith(start, end);
});
it('should call removeText for each affected child', () => {
const offset = childMock.length;
node.removeText(start, offset + end);
expect(childMock.removeText).toBeCalledWith(start, offset);
expect(anotherChildMock.removeText).toBeCalledWith(0, end);
});
it('should remove all text by default', () => {
node.removeText();
expect(childMock.removeText).toBeCalledWith(0, childMock.length);
expect(anotherChildMock.removeText).toBeCalledWith(0, anotherChildMock.length);
});
it('should call remove() if length is 0 after removeText() call', () => {
const removeSpy = jest.spyOn(node, 'remove');
const lengthSpy = jest.spyOn(node, 'length', 'get').mockImplementation(() => 0);
node.removeText();
expect(removeSpy).toBeCalled();
removeSpy.mockRestore();
lengthSpy.mockRestore();
});
});
describe('.getText()', () => {
const start = 3;
const end = 5;
it('should call getText() for the relevant child', () => {
node.getText(start, end);
expect(childMock.getText).toBeCalledWith(start, end);
});
it('should adjust index by child offset', () => {
const offset = childMock.length;
node.getText(offset + start, offset + end);
expect(anotherChildMock.getText).toBeCalledWith(start, end);
});
it('should call getText for all relevant children', () => {
const offset = childMock.length;
node.getText(start, offset + end);
expect(childMock.getText).toBeCalledWith(start, offset);
expect(anotherChildMock.getText).toBeCalledWith(0, end);
});
it('should return all text by default', () => {
node.getText();
expect(childMock.getText).toBeCalledWith(0, childMock.length);
expect(anotherChildMock.getText).toBeCalledWith(0, anotherChildMock.length);
});
});
describe('.getFragments()', () => {
/**
* @todo
*/
it.todo('should return fragments for sub-tree');
it('should return node\'s fragment', () => {
const fragments = node.getFragments();
expect(fragments).toEqual([
{
tool,
data,
range: [0, node.length],
},
]);
});
});
describe('.split()', () => {
const index = 5;
it('should not split (return null) if index is 0', () => {
const newNode = node.split(0);
expect(newNode).toBeNull();
});
it('should not split (return null) if index equals text length', () => {
const newNode = node.split(node.length);
expect(newNode).toBeNull();
});
it('should create new FormattingNode on split', () => {
const newNode = node.split(index);
expect(newNode).toBeInstanceOf(FormattingNode);
});
/**
* @todo check this and related cases with integration tests
*/
it('should create new FormattingNode with children split from the original one', () => {
const newNode = node.split(childMock.length);
expect(newNode?.children).toEqual([ anotherChildMock ]);
});
it('should call split method of child containing the specified index', () => {
node.split(index);
expect(childMock.split).toBeCalledWith(index);
});
it('should insert new node to the parent', () => {
const newNode = node.split(index);
expect(parentMock.insertAfter).toBeCalledWith(node, newNode);
});
});
describe('.format()', () => {
const start = 3;
const end = 5;
it('should apply formatting to the relevant child', () => {
node.format(anotherTool, start, end);
expect( | childMock.format).toBeCalledWith(anotherTool, start, end, undefined); |
});
it('should adjust index by child offset', () => {
const offset = childMock.length;
node.format(anotherTool, offset + start, offset + end);
expect(anotherChildMock.format).toBeCalledWith(anotherTool, start, end, undefined);
});
it('should format all relevant children', () => {
const offset = childMock.length;
node.format(anotherTool, start, offset + end);
expect(childMock.format).toBeCalledWith(anotherTool, start, offset, undefined);
expect(anotherChildMock.format).toBeCalledWith(anotherTool, 0, end, undefined);
});
it('should do nothing if same tool is being applied', () => {
node.format(tool, start, end);
expect(childMock.format).not.toBeCalled();
expect(anotherChildMock.format).not.toBeCalled();
});
it('should return empty array if same tool is being applied', () => {
const result = node.format(tool, start, end);
expect(result).toHaveLength(0);
});
it('should return array of new formatting nodes', () => {
const result = node.format(anotherTool, start, end);
expect(result).toEqual(childMock.format(anotherTool, start, end));
});
});
});
| src/entities/FormattingNode/FormattingNode.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/TextNode/TextNode.spec.ts",
"retrieved_chunk": " const formattingNode = fragments[1] as FormattingNode;\n expect(formattingNode.children[0].getText()).toEqual(initialText.slice(start, end));\n });\n it('should call parent\\'s insertAfter with new nodes', () => {\n const name = createInlineToolName('bold');\n const start = 5;\n const end = 8;\n const fragments = node.format(name, start, end);\n expect(parentMock.insertAfter).toBeCalledWith(node, ...fragments);\n });",
"score": 42.7091397432778
},
{
"filename": "src/entities/TextNode/TextNode.spec.ts",
"retrieved_chunk": " const end = 5;\n const fragments = node.format(name, 0, end);\n expect(fragments).toHaveLength(2);\n expect(fragments[0]).toBeInstanceOf(FormattingNode);\n expect(fragments[1]).toBeInstanceOf(TextNode);\n });\n it('should return two fragments if formatting to the end, but not from the start', () => {\n const name = createInlineToolName('bold');\n const start = 5;\n const fragments = node.format(name, start, initialText.length);",
"score": 38.31636146100049
},
{
"filename": "src/entities/TextNode/TextNode.spec.ts",
"retrieved_chunk": " expect(fragments).toHaveLength(3);\n expect(fragments[0]).toBeInstanceOf(TextNode);\n expect(fragments[1]).toBeInstanceOf(FormattingNode);\n expect(fragments[2]).toBeInstanceOf(TextNode);\n });\n it('should return FormattingNode with a TextNode as a child with correct text value', () => {\n const name = createInlineToolName('bold');\n const start = 5;\n const end = 8;\n const fragments = node.format(name, start, end);",
"score": 32.072047595011895
},
{
"filename": "src/entities/TextNode/TextNode.spec.ts",
"retrieved_chunk": " });\n describe('.format()', () => {\n it('should return just one FormattingNode, if formatting full TextNode', () => {\n const name = createInlineToolName('bold');\n const fragments = node.format(name, 0, initialText.length);\n expect(fragments).toHaveLength(1);\n expect(fragments[0]).toBeInstanceOf(FormattingNode);\n });\n it('should return two fragments if formatting from the start, but not to the end', () => {\n const name = createInlineToolName('bold');",
"score": 30.61404340963377
},
{
"filename": "src/entities/TextNode/TextNode.spec.ts",
"retrieved_chunk": " expect(fragments).toHaveLength(2);\n expect(fragments[0]).toBeInstanceOf(TextNode);\n expect(fragments[1]).toBeInstanceOf(FormattingNode);\n });\n it('should return three fragments if formatting in the middle', () => {\n const name = createInlineToolName('bold');\n const start = 5;\n const end = 8;\n const fragments = node.format(name, start, end);\n // eslint-disable-next-line @typescript-eslint/no-magic-numbers",
"score": 30.339771114736457
}
] | typescript | childMock.format).toBeCalledWith(anotherTool, start, end, undefined); |
import {
FormattingNodeConstructorParameters,
InlineToolName,
InlineToolData
} from './types';
import { ChildNode, InlineFragment, InlineNode, InlineNodeSerialized, ParentNode } from '../interfaces';
export * from './types';
/**
* We need to extend FormattingNode interface with ChildNode and ParentNode ones to use the methods from mixins
*/
export interface FormattingNode extends ChildNode, ParentNode {}
/**
* FormattingNode class represents a node in a tree-like structure, used to store and manipulate formatted text content
*/
@ParentNode
@ChildNode
export class FormattingNode implements InlineNode {
/**
* Private field representing the name of the formatting tool applied to the content
*/
#tool: InlineToolName;
/**
* Any additional data associated with the formatting tool
*/
#data?: InlineToolData;
/**
* Constructor for FormattingNode class.
*
* @param args - FormattingNode constructor arguments.
* @param args.tool - The name of the formatting tool applied to the content.
* @param args.data - Any additional data associated with the formatting.
*/
constructor({ tool, data }: FormattingNodeConstructorParameters) {
this.#tool = tool;
this.#data = data;
}
/**
* Returns text value length of current node (including subtree)
*/
public get length(): number {
return this.children.reduce((sum, child) => sum + child.length, 0);
}
/**
* Returns serialized value of the node: text and formatting fragments
*/
public get serialized(): InlineNodeSerialized {
return {
text: this.getText(),
fragments: this.getFragments(),
};
}
/**
* Inserts text to the specified index, by default appends text to the end of the current value
*
* @param text - text to insert
* @param [index] - char index where to insert text
*/
public insertText(text: string, index = this.length): void {
const [child, offset] = this.#findChildByIndex(index);
child?.insertText(text, index - offset);
}
/**
* Removes text form the specified range
*
* @param [start] - start char index of the range, by default 0
* @param [end] - end char index of the range, by default length of the text value
* @returns {string} removed text
*/
public removeText(start = 0, end = this.length): string {
const result = this.#reduceChildrenInRange(
start,
end,
(acc, child, childStart, childEnd) => {
return acc + child.removeText(childStart, childEnd);
},
''
);
if (this.length === 0) {
this.remove();
}
return result;
}
/**
* Returns text from the specified range
*
* @param [start] - start char index of the range, by default 0
* @param [end] - end char index of the range, by default length of the text value
*/
public getText(start = 0, end = this.length): string {
return this.#reduceChildrenInRange(
start,
end,
(acc, child, childStart, childEnd) => {
return acc + child.getText(childStart, childEnd);
},
''
);
}
/**
* Returns inline fragments for subtree including current node from the specified range
*
* @param [start] - start char index of the range, by default 0
* @param [end] - end char index of the range, by default length of the text value
*/
public getFragments(start = 0, end = this.length): InlineFragment[] {
return this.#reduceChildrenInRange<InlineFragment[]>(
start,
end,
(acc, child, childStart, childEnd) => {
/**
* If child is not a FormattingNode, it doesn't include any fragments. So we skip it.
*/
if (!(child instanceof FormattingNode)) {
return acc;
}
acc.push(...child.getFragments(childStart, childEnd));
return acc;
},
[ {
tool: this.#tool,
data: this.#data,
range: [start, end],
} ]
);
}
/**
* Splits current node by the specified index
*
* @param index - char index where to split the node
* @returns {FormattingNode | null} new node
*/
public split(index: number): FormattingNode | null {
if (index === 0 || index === this.length) {
return null;
}
const newNode = new FormattingNode({
tool: this.#tool,
data: this.#data,
});
const [child, offset] = this.#findChildByIndex(index);
if (!child) {
return null;
}
// Have to save length as it is changed after split
const childLength = child.length;
const splitNode = child.split(index - offset);
let midNodeIndex = this.children.indexOf(child);
/**
* If node is split or if node is not split but index equals to child length, we should split children from the next node
*/
if (splitNode || (index - offset === childLength)) {
midNodeIndex += 1;
}
newNode.append(...this.children.slice(midNodeIndex));
this.parent?.insertAfter(this, newNode);
return newNode;
}
/**
* Applies formatting to the text with specified inline tool in the specified range
*
* @param tool - name of inline tool to apply
* @param start - char start index of the range
* @param end - char end index of the range
* @param [data] - inline tool data if applicable
*/
public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {
/**
* In case current tool is the same as new one, do nothing
*/
if (tool === this.#tool) {
return [];
}
return this.#reduceChildrenInRange<InlineNode[]>(
start,
end,
(acc, child, childStart, childEnd) => {
acc.push(...child.format(tool, childStart, childEnd, data));
return acc;
},
[]
);
}
/**
* Iterates through children in range and calls callback for each
*
* @param start - range start char index
* @param end - range end char index
* @param callback - callback to apply on children
* @param initialValue - initial accumulator value
* @private
*/
#reduceChildrenInRange<Acc>(
start: number,
end: number,
callback: (acc: Acc, child: InlineNode, start: number, end: number) => Acc,
initialValue: Acc
): Acc {
let result = initialValue;
for (const child of this.children) {
if (start < child. | length && end > 0 && start < end) { |
result = callback(result, child, Math.max(start, 0), Math.min(child.length, end));
}
start -= child.length;
end -= child.length;
}
return result;
}
/**
* Returns child by passed text index
*
* @param index - char index
* @private
*/
#findChildByIndex(index: number): [child: InlineNode & ChildNode | null, offset: number] {
let totalLength = 0;
for (const child of this.children) {
if (index <= child.length + totalLength) {
return [child, totalLength];
}
totalLength += child.length;
}
return [null, totalLength];
}
}
| src/entities/FormattingNode/index.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/TextNode/index.ts",
"retrieved_chunk": " *\n * @param start - start char index of the range\n * @param end - end char index of the range\n */\n #cloneContents(start: number, end: number): TextNode {\n return new TextNode({\n value: this.getText(start, end),\n });\n }\n}",
"score": 41.10763827841306
},
{
"filename": "src/entities/TextNode/index.ts",
"retrieved_chunk": " public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {\n this.#validateIndex(start);\n this.#validateIndex(end);\n const formattingNode = new FormattingNode({\n tool,\n data,\n });\n const fragments: ChildNode[] = [];\n /**\n * If start index is greater than 0, we need extract part of the text before the start index",
"score": 38.6005882626302
},
{
"filename": "src/entities/interfaces/InlineNode.ts",
"retrieved_chunk": " * @param [start] - start char index of the range\n * @param [end] - end char index of the range\n * @param [data] - Inline Tool data if applicable\n * @returns {InlineNode[]} - array of nodes after applied formatting\n */\n format(name: InlineToolName, start?: number, end?: number, data?: InlineToolData): InlineNode[];\n /**\n * Inserts text at passed char index\n *\n * @param text - text to insert",
"score": 36.25998578728295
},
{
"filename": "src/entities/interfaces/InlineNode.ts",
"retrieved_chunk": " tool: InlineToolName;\n /**\n * Inline Tool Data if applicable\n */\n data?: InlineToolData;\n /**\n * Range of the fragment\n */\n range: [start: number, end: number];\n}",
"score": 35.37480316742605
},
{
"filename": "src/entities/interfaces/InlineNode.ts",
"retrieved_chunk": " * Returns text value in passed range\n *\n * @param start - start char index of the range\n * @param end - end char index of the range\n */\n getText(start?: number, end?: number): string;\n /**\n * Applies inline formatting on the passed range\n *\n * @param name - name of Inline Tool to apply",
"score": 34.00811904656855
}
] | typescript | length && end > 0 && start < end) { |
import {
FormattingNodeConstructorParameters,
InlineToolName,
InlineToolData
} from './types';
import { ChildNode, InlineFragment, InlineNode, InlineNodeSerialized, ParentNode } from '../interfaces';
export * from './types';
/**
* We need to extend FormattingNode interface with ChildNode and ParentNode ones to use the methods from mixins
*/
export interface FormattingNode extends ChildNode, ParentNode {}
/**
* FormattingNode class represents a node in a tree-like structure, used to store and manipulate formatted text content
*/
@ParentNode
@ChildNode
export class FormattingNode implements InlineNode {
/**
* Private field representing the name of the formatting tool applied to the content
*/
#tool: InlineToolName;
/**
* Any additional data associated with the formatting tool
*/
#data?: InlineToolData;
/**
* Constructor for FormattingNode class.
*
* @param args - FormattingNode constructor arguments.
* @param args.tool - The name of the formatting tool applied to the content.
* @param args.data - Any additional data associated with the formatting.
*/
constructor({ tool, data }: FormattingNodeConstructorParameters) {
this.#tool = tool;
this.#data = data;
}
/**
* Returns text value length of current node (including subtree)
*/
public get length(): number {
return this.children.reduce | ((sum, child) => sum + child.length, 0); |
}
/**
* Returns serialized value of the node: text and formatting fragments
*/
public get serialized(): InlineNodeSerialized {
return {
text: this.getText(),
fragments: this.getFragments(),
};
}
/**
* Inserts text to the specified index, by default appends text to the end of the current value
*
* @param text - text to insert
* @param [index] - char index where to insert text
*/
public insertText(text: string, index = this.length): void {
const [child, offset] = this.#findChildByIndex(index);
child?.insertText(text, index - offset);
}
/**
* Removes text form the specified range
*
* @param [start] - start char index of the range, by default 0
* @param [end] - end char index of the range, by default length of the text value
* @returns {string} removed text
*/
public removeText(start = 0, end = this.length): string {
const result = this.#reduceChildrenInRange(
start,
end,
(acc, child, childStart, childEnd) => {
return acc + child.removeText(childStart, childEnd);
},
''
);
if (this.length === 0) {
this.remove();
}
return result;
}
/**
* Returns text from the specified range
*
* @param [start] - start char index of the range, by default 0
* @param [end] - end char index of the range, by default length of the text value
*/
public getText(start = 0, end = this.length): string {
return this.#reduceChildrenInRange(
start,
end,
(acc, child, childStart, childEnd) => {
return acc + child.getText(childStart, childEnd);
},
''
);
}
/**
* Returns inline fragments for subtree including current node from the specified range
*
* @param [start] - start char index of the range, by default 0
* @param [end] - end char index of the range, by default length of the text value
*/
public getFragments(start = 0, end = this.length): InlineFragment[] {
return this.#reduceChildrenInRange<InlineFragment[]>(
start,
end,
(acc, child, childStart, childEnd) => {
/**
* If child is not a FormattingNode, it doesn't include any fragments. So we skip it.
*/
if (!(child instanceof FormattingNode)) {
return acc;
}
acc.push(...child.getFragments(childStart, childEnd));
return acc;
},
[ {
tool: this.#tool,
data: this.#data,
range: [start, end],
} ]
);
}
/**
* Splits current node by the specified index
*
* @param index - char index where to split the node
* @returns {FormattingNode | null} new node
*/
public split(index: number): FormattingNode | null {
if (index === 0 || index === this.length) {
return null;
}
const newNode = new FormattingNode({
tool: this.#tool,
data: this.#data,
});
const [child, offset] = this.#findChildByIndex(index);
if (!child) {
return null;
}
// Have to save length as it is changed after split
const childLength = child.length;
const splitNode = child.split(index - offset);
let midNodeIndex = this.children.indexOf(child);
/**
* If node is split or if node is not split but index equals to child length, we should split children from the next node
*/
if (splitNode || (index - offset === childLength)) {
midNodeIndex += 1;
}
newNode.append(...this.children.slice(midNodeIndex));
this.parent?.insertAfter(this, newNode);
return newNode;
}
/**
* Applies formatting to the text with specified inline tool in the specified range
*
* @param tool - name of inline tool to apply
* @param start - char start index of the range
* @param end - char end index of the range
* @param [data] - inline tool data if applicable
*/
public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {
/**
* In case current tool is the same as new one, do nothing
*/
if (tool === this.#tool) {
return [];
}
return this.#reduceChildrenInRange<InlineNode[]>(
start,
end,
(acc, child, childStart, childEnd) => {
acc.push(...child.format(tool, childStart, childEnd, data));
return acc;
},
[]
);
}
/**
* Iterates through children in range and calls callback for each
*
* @param start - range start char index
* @param end - range end char index
* @param callback - callback to apply on children
* @param initialValue - initial accumulator value
* @private
*/
#reduceChildrenInRange<Acc>(
start: number,
end: number,
callback: (acc: Acc, child: InlineNode, start: number, end: number) => Acc,
initialValue: Acc
): Acc {
let result = initialValue;
for (const child of this.children) {
if (start < child.length && end > 0 && start < end) {
result = callback(result, child, Math.max(start, 0), Math.min(child.length, end));
}
start -= child.length;
end -= child.length;
}
return result;
}
/**
* Returns child by passed text index
*
* @param index - char index
* @private
*/
#findChildByIndex(index: number): [child: InlineNode & ChildNode | null, offset: number] {
let totalLength = 0;
for (const child of this.children) {
if (index <= child.length + totalLength) {
return [child, totalLength];
}
totalLength += child.length;
}
return [null, totalLength];
}
}
| src/entities/FormattingNode/index.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/TextNode/index.ts",
"retrieved_chunk": " constructor({ value = '' }: TextNodeConstructorParameters = {}) {\n this.#value = value;\n }\n /**\n * Returns length of the text\n */\n public get length(): number {\n return this.#value.length;\n }\n /**",
"score": 37.00102700416433
},
{
"filename": "src/entities/EditorDocument/index.ts",
"retrieved_chunk": " * @param args.properties - The properties of the document.\n */\n constructor({ children, properties }: EditorDocumentConstructorParameters) {\n this.#children = children;\n this.#properties = properties;\n }\n /**\n * Returns count of child BlockNodes of the EditorDocument.\n */\n public get length(): number {",
"score": 29.76167420623889
},
{
"filename": "src/entities/TextNode/index.ts",
"retrieved_chunk": " public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {\n this.#validateIndex(start);\n this.#validateIndex(end);\n const formattingNode = new FormattingNode({\n tool,\n data,\n });\n const fragments: ChildNode[] = [];\n /**\n * If start index is greater than 0, we need extract part of the text before the start index",
"score": 29.38928778991287
},
{
"filename": "src/entities/interfaces/ParentNode.ts",
"retrieved_chunk": " this.#children = children;\n this.children.forEach(child => child.appendTo(this));\n }\n /**\n * Returns node's children\n */\n public get children(): ChildNode[] {\n return this.#children;\n }\n /**",
"score": 27.602935231395485
},
{
"filename": "src/entities/BlockTune/index.ts",
"retrieved_chunk": " */\n public update(key: string, value: unknown): void {\n this.#data[key] = value;\n }\n /**\n * Returns serialized version of the BlockTune.\n */\n public get serialized(): BlockTuneSerialized {\n return {\n name: this.#name,",
"score": 24.90288097171081
}
] | typescript | ((sum, child) => sum + child.length, 0); |
import {
FormattingNodeConstructorParameters,
InlineToolName,
InlineToolData
} from './types';
import { ChildNode, InlineFragment, InlineNode, InlineNodeSerialized, ParentNode } from '../interfaces';
export * from './types';
/**
* We need to extend FormattingNode interface with ChildNode and ParentNode ones to use the methods from mixins
*/
export interface FormattingNode extends ChildNode, ParentNode {}
/**
* FormattingNode class represents a node in a tree-like structure, used to store and manipulate formatted text content
*/
@ParentNode
@ChildNode
export class FormattingNode implements InlineNode {
/**
* Private field representing the name of the formatting tool applied to the content
*/
#tool: InlineToolName;
/**
* Any additional data associated with the formatting tool
*/
#data?: InlineToolData;
/**
* Constructor for FormattingNode class.
*
* @param args - FormattingNode constructor arguments.
* @param args.tool - The name of the formatting tool applied to the content.
* @param args.data - Any additional data associated with the formatting.
*/
constructor({ tool, data }: FormattingNodeConstructorParameters) {
this.#tool = tool;
this.#data = data;
}
/**
* Returns text value length of current node (including subtree)
*/
public get length(): number {
return this.children.reduce(( | sum, child) => sum + child.length, 0); |
}
/**
* Returns serialized value of the node: text and formatting fragments
*/
public get serialized(): InlineNodeSerialized {
return {
text: this.getText(),
fragments: this.getFragments(),
};
}
/**
* Inserts text to the specified index, by default appends text to the end of the current value
*
* @param text - text to insert
* @param [index] - char index where to insert text
*/
public insertText(text: string, index = this.length): void {
const [child, offset] = this.#findChildByIndex(index);
child?.insertText(text, index - offset);
}
/**
* Removes text form the specified range
*
* @param [start] - start char index of the range, by default 0
* @param [end] - end char index of the range, by default length of the text value
* @returns {string} removed text
*/
public removeText(start = 0, end = this.length): string {
const result = this.#reduceChildrenInRange(
start,
end,
(acc, child, childStart, childEnd) => {
return acc + child.removeText(childStart, childEnd);
},
''
);
if (this.length === 0) {
this.remove();
}
return result;
}
/**
* Returns text from the specified range
*
* @param [start] - start char index of the range, by default 0
* @param [end] - end char index of the range, by default length of the text value
*/
public getText(start = 0, end = this.length): string {
return this.#reduceChildrenInRange(
start,
end,
(acc, child, childStart, childEnd) => {
return acc + child.getText(childStart, childEnd);
},
''
);
}
/**
* Returns inline fragments for subtree including current node from the specified range
*
* @param [start] - start char index of the range, by default 0
* @param [end] - end char index of the range, by default length of the text value
*/
public getFragments(start = 0, end = this.length): InlineFragment[] {
return this.#reduceChildrenInRange<InlineFragment[]>(
start,
end,
(acc, child, childStart, childEnd) => {
/**
* If child is not a FormattingNode, it doesn't include any fragments. So we skip it.
*/
if (!(child instanceof FormattingNode)) {
return acc;
}
acc.push(...child.getFragments(childStart, childEnd));
return acc;
},
[ {
tool: this.#tool,
data: this.#data,
range: [start, end],
} ]
);
}
/**
* Splits current node by the specified index
*
* @param index - char index where to split the node
* @returns {FormattingNode | null} new node
*/
public split(index: number): FormattingNode | null {
if (index === 0 || index === this.length) {
return null;
}
const newNode = new FormattingNode({
tool: this.#tool,
data: this.#data,
});
const [child, offset] = this.#findChildByIndex(index);
if (!child) {
return null;
}
// Have to save length as it is changed after split
const childLength = child.length;
const splitNode = child.split(index - offset);
let midNodeIndex = this.children.indexOf(child);
/**
* If node is split or if node is not split but index equals to child length, we should split children from the next node
*/
if (splitNode || (index - offset === childLength)) {
midNodeIndex += 1;
}
newNode.append(...this.children.slice(midNodeIndex));
this.parent?.insertAfter(this, newNode);
return newNode;
}
/**
* Applies formatting to the text with specified inline tool in the specified range
*
* @param tool - name of inline tool to apply
* @param start - char start index of the range
* @param end - char end index of the range
* @param [data] - inline tool data if applicable
*/
public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {
/**
* In case current tool is the same as new one, do nothing
*/
if (tool === this.#tool) {
return [];
}
return this.#reduceChildrenInRange<InlineNode[]>(
start,
end,
(acc, child, childStart, childEnd) => {
acc.push(...child.format(tool, childStart, childEnd, data));
return acc;
},
[]
);
}
/**
* Iterates through children in range and calls callback for each
*
* @param start - range start char index
* @param end - range end char index
* @param callback - callback to apply on children
* @param initialValue - initial accumulator value
* @private
*/
#reduceChildrenInRange<Acc>(
start: number,
end: number,
callback: (acc: Acc, child: InlineNode, start: number, end: number) => Acc,
initialValue: Acc
): Acc {
let result = initialValue;
for (const child of this.children) {
if (start < child.length && end > 0 && start < end) {
result = callback(result, child, Math.max(start, 0), Math.min(child.length, end));
}
start -= child.length;
end -= child.length;
}
return result;
}
/**
* Returns child by passed text index
*
* @param index - char index
* @private
*/
#findChildByIndex(index: number): [child: InlineNode & ChildNode | null, offset: number] {
let totalLength = 0;
for (const child of this.children) {
if (index <= child.length + totalLength) {
return [child, totalLength];
}
totalLength += child.length;
}
return [null, totalLength];
}
}
| src/entities/FormattingNode/index.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/TextNode/index.ts",
"retrieved_chunk": " constructor({ value = '' }: TextNodeConstructorParameters = {}) {\n this.#value = value;\n }\n /**\n * Returns length of the text\n */\n public get length(): number {\n return this.#value.length;\n }\n /**",
"score": 37.00102700416433
},
{
"filename": "src/entities/EditorDocument/index.ts",
"retrieved_chunk": " * @param args.properties - The properties of the document.\n */\n constructor({ children, properties }: EditorDocumentConstructorParameters) {\n this.#children = children;\n this.#properties = properties;\n }\n /**\n * Returns count of child BlockNodes of the EditorDocument.\n */\n public get length(): number {",
"score": 29.76167420623889
},
{
"filename": "src/entities/TextNode/index.ts",
"retrieved_chunk": " public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {\n this.#validateIndex(start);\n this.#validateIndex(end);\n const formattingNode = new FormattingNode({\n tool,\n data,\n });\n const fragments: ChildNode[] = [];\n /**\n * If start index is greater than 0, we need extract part of the text before the start index",
"score": 29.38928778991287
},
{
"filename": "src/entities/interfaces/ParentNode.ts",
"retrieved_chunk": " this.#children = children;\n this.children.forEach(child => child.appendTo(this));\n }\n /**\n * Returns node's children\n */\n public get children(): ChildNode[] {\n return this.#children;\n }\n /**",
"score": 27.602935231395485
},
{
"filename": "src/entities/BlockTune/index.ts",
"retrieved_chunk": " */\n public update(key: string, value: unknown): void {\n this.#data[key] = value;\n }\n /**\n * Returns serialized version of the BlockTune.\n */\n public get serialized(): BlockTuneSerialized {\n return {\n name: this.#name,",
"score": 24.90288097171081
}
] | typescript | sum, child) => sum + child.length, 0); |
import { describe, it, expect, beforeEach } from '@jest/globals';
import { TextNode } from './index';
import { createInlineToolName, FormattingNode } from '../FormattingNode';
import type { ParentNode } from '../interfaces';
describe('TextNode', () => {
const initialText = 'initial text';
const text = 'some text';
const parentMock = {
insertAfter: jest.fn(),
removeChild: jest.fn(),
append: jest.fn(),
children: [],
} as unknown as ParentNode;
let node: TextNode;
beforeEach(() => {
node = new TextNode({
value: initialText,
parent: parentMock as FormattingNode,
});
});
it('should have empty value by default', () => {
node = new TextNode();
expect(node.getText()).toEqual('');
});
describe('.insertText()', () => {
it('should set text to value if node is empty', () => {
node = new TextNode();
node.insertText(text);
expect(node.getText()).toEqual(text);
});
it('should append text if not empty', () => {
node.insertText(text);
expect(node.getText()).toEqual(initialText + text);
});
it('should prepend text if index is 0 and node is not empty', () => {
node.insertText(text, 0);
expect(node.getText()).toEqual(text + initialText);
});
it('should insert text at index if not empty', () => {
const index = 5;
node.insertText(text, index);
expect(node.getText()).toEqual(initialText.slice(0, index) + text + initialText.slice(index));
});
it('should throw an error if index is less than 0', () => {
const f = (): void => node.insertText(text, -1);
expect(f).toThrowError();
});
it('should throw an error if index is greater than node length', () => {
const f = (): void => node.insertText(text, initialText.length + 1);
expect(f).toThrowError();
});
});
describe('.getText()', () => {
it('should return sliced value if start provided', () => {
const start = 5;
expect(node.getText(start)).toEqual(initialText.slice(start));
});
it('should return sliced value if end provided', () => {
const end = 6;
expect(node.getText(0, end)).toEqual(initialText.slice(0, end));
});
it('should return sliced value if full range provided', () => {
const start = 3;
const end = 9;
expect(node.getText(start, end)).toEqual(initialText.slice(start, end));
});
it('should throw an error if start is invalid index', () => {
expect(() => node.getText(-1)).toThrowError();
expect(() => node.getText(initialText.length + 1)).toThrowError();
});
it('should throw an error if end is invalid index', () => {
expect(() => node.getText(0, initialText.length + 1)).toThrowError();
});
it('should throw an error if end index is greater than start index', () => {
const start = 5;
const end = 3;
expect(() => node.getText(start, end)).toThrowError();
});
it('should not throw an error if end index is equal to start index', () => {
const start = 5;
const end = 5;
expect(() => node.getText(start, end)).not.toThrowError();
});
});
describe('.removeText()', () => {
it('should remove all text by default', () => {
node.removeText();
expect(node.getText()).toEqual('');
});
it('should remove text from specified index', () => {
const start = 3;
node.removeText(start);
expect(node.getText()).toEqual(initialText.slice(0, start));
});
it('should remove text from 0 to specified end index', () => {
const end = 8;
node.removeText(0, end);
expect(node.getText()).toEqual(initialText.slice(end));
});
it('should remove text from specified start and end indecies', () => {
const start = 3;
const end = 8;
node.removeText(start, end);
expect(node.getText()).toEqual(initialText.slice(0, start) + initialText.slice(end));
});
it('should call remove() method if node is empty after removeText() call', () => {
jest.spyOn(node, 'remove');
node.removeText();
expect(node.remove).toBeCalled();
});
});
describe('.format()', () => {
it('should return just one FormattingNode, if formatting full TextNode', () => {
const name = createInlineToolName('bold');
const fragments = node.format(name, 0, initialText.length);
expect(fragments).toHaveLength(1);
expect(fragments[0]).toBeInstanceOf(FormattingNode);
});
it('should return two fragments if formatting from the start, but not to the end', () => {
const name = createInlineToolName('bold');
const end = 5;
const fragments = node.format(name, 0, end);
expect(fragments).toHaveLength(2);
expect(fragments[0]).toBeInstanceOf(FormattingNode);
expect(fragments[1]).toBeInstanceOf(TextNode);
});
it('should return two fragments if formatting to the end, but not from the start', () => {
const name = createInlineToolName('bold');
const start = 5;
const fragments = node.format(name, start, initialText.length);
expect(fragments).toHaveLength(2);
expect(fragments[0]).toBeInstanceOf(TextNode);
expect(fragments[1]).toBeInstanceOf(FormattingNode);
});
it('should return three fragments if formatting in the middle', () => {
const name = createInlineToolName('bold');
const start = 5;
const end = 8;
const fragments = node.format(name, start, end);
// eslint-disable-next-line @typescript-eslint/no-magic-numbers
expect(fragments).toHaveLength(3);
expect(fragments[0]).toBeInstanceOf(TextNode);
expect(fragments[1]).toBeInstanceOf(FormattingNode);
expect(fragments[2]).toBeInstanceOf(TextNode);
});
it('should return FormattingNode with a TextNode as a child with correct text value', () => {
const name = createInlineToolName('bold');
const start = 5;
const end = 8;
const fragments = node.format(name, start, end);
const formattingNode = fragments[1] as FormattingNode;
expect( | formattingNode.children[0].getText()).toEqual(initialText.slice(start, end)); |
});
it('should call parent\'s insertAfter with new nodes', () => {
const name = createInlineToolName('bold');
const start = 5;
const end = 8;
const fragments = node.format(name, start, end);
expect(parentMock.insertAfter).toBeCalledWith(node, ...fragments);
});
});
describe('.split()', () => {
const index = 5;
it('should not split (return null) if index is 0', () => {
const newNode = node.split(0);
expect(newNode).toBeNull();
});
it('should not split (return null) if index equals text length', () => {
const newNode = node.split(initialText.length);
expect(newNode).toBeNull();
});
it('should create new TextNode on split', () => {
const newNode = node.split(index);
expect(newNode).toBeInstanceOf(TextNode);
});
it('should create new TextNode with text value splitted from the original one', () => {
const newNode = node.split(index);
expect(newNode?.getText()).toEqual(initialText.slice(index));
});
it('should remove split text value from the original node', () => {
node.split(index);
expect(node.getText()).toEqual(initialText.slice(0, index));
});
it('should insert new node to the parent', () => {
const newNode = node.split(index);
expect(parentMock.insertAfter).toBeCalledWith(node, newNode);
});
});
describe('.serialized', () => {
it('should return text value and empty array of fragments', () => {
const result = node.serialized;
expect(result).toEqual({
text: initialText,
fragments: [],
});
});
});
});
| src/entities/TextNode/TextNode.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/TextNode/index.ts",
"retrieved_chunk": " */\n if (start > 0) {\n fragments.push(this.#cloneContents(0, start));\n }\n /**\n * Formatting is applied to the specified range\n */\n const formattedFragment = this.#cloneContents(start, end);\n formattedFragment.appendTo(formattingNode);\n fragments.push(formattingNode);",
"score": 45.032978785181484
},
{
"filename": "src/entities/TextNode/index.ts",
"retrieved_chunk": " public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {\n this.#validateIndex(start);\n this.#validateIndex(end);\n const formattingNode = new FormattingNode({\n tool,\n data,\n });\n const fragments: ChildNode[] = [];\n /**\n * If start index is greater than 0, we need extract part of the text before the start index",
"score": 42.06958960443021
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " describe('.serialized', () => {\n it('should return concatenated text of all fragments with fragments list describing formatting', () => {\n const result = node.serialized;\n expect(result).toEqual({\n text: childMock.getText() + anotherChildMock.getText(),\n fragments: [\n {\n tool,\n data,\n range: [0, node.length],",
"score": 35.75237528270751
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " removeSpy.mockRestore();\n lengthSpy.mockRestore();\n });\n });\n describe('.getText()', () => {\n const start = 3;\n const end = 5;\n it('should call getText() for the relevant child', () => {\n node.getText(start, end);\n expect(childMock.getText).toBeCalledWith(start, end);",
"score": 34.465850461620136
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " const newNode = node.split(index);\n expect(parentMock.insertAfter).toBeCalledWith(node, newNode);\n });\n });\n describe('.format()', () => {\n const start = 3;\n const end = 5;\n it('should apply formatting to the relevant child', () => {\n node.format(anotherTool, start, end);\n expect(childMock.format).toBeCalledWith(anotherTool, start, end, undefined);",
"score": 33.246267582954516
}
] | typescript | formattingNode.children[0].getText()).toEqual(initialText.slice(start, end)); |
import { describe, it, expect, beforeEach } from '@jest/globals';
import { TextNode } from './index';
import { createInlineToolName, FormattingNode } from '../FormattingNode';
import type { ParentNode } from '../interfaces';
describe('TextNode', () => {
const initialText = 'initial text';
const text = 'some text';
const parentMock = {
insertAfter: jest.fn(),
removeChild: jest.fn(),
append: jest.fn(),
children: [],
} as unknown as ParentNode;
let node: TextNode;
beforeEach(() => {
node = new TextNode({
value: initialText,
parent: parentMock as FormattingNode,
});
});
it('should have empty value by default', () => {
node = new TextNode();
expect(node.getText()).toEqual('');
});
describe('.insertText()', () => {
it('should set text to value if node is empty', () => {
node = new TextNode();
node.insertText(text);
expect(node.getText()).toEqual(text);
});
it('should append text if not empty', () => {
node.insertText(text);
expect(node.getText()).toEqual(initialText + text);
});
it('should prepend text if index is 0 and node is not empty', () => {
node.insertText(text, 0);
expect(node.getText()).toEqual(text + initialText);
});
it('should insert text at index if not empty', () => {
const index = 5;
node.insertText(text, index);
expect(node.getText()).toEqual(initialText.slice(0, index) + text + initialText.slice(index));
});
it('should throw an error if index is less than 0', () => {
const f = (): void => node.insertText(text, -1);
expect(f).toThrowError();
});
it('should throw an error if index is greater than node length', () => {
const f = (): void => node.insertText(text, initialText.length + 1);
expect(f).toThrowError();
});
});
describe('.getText()', () => {
  it('should return sliced value if start provided', () => {
    const start = 5;

    expect(node.getText(start)).toEqual(initialText.slice(start));
  });

  it('should return sliced value if end provided', () => {
    const end = 6;

    expect(node.getText(0, end)).toEqual(initialText.slice(0, end));
  });

  it('should return sliced value if full range provided', () => {
    const start = 3;
    const end = 9;

    expect(node.getText(start, end)).toEqual(initialText.slice(start, end));
  });

  it('should throw an error if start is invalid index', () => {
    // toThrow is the non-deprecated form of Jest's toThrowError alias
    expect(() => node.getText(-1)).toThrow();
    expect(() => node.getText(initialText.length + 1)).toThrow();
  });

  it('should throw an error if end is invalid index', () => {
    expect(() => node.getText(0, initialText.length + 1)).toThrow();
  });

  it('should throw an error if end index is greater than start index', () => {
    const start = 5;
    const end = 3;

    expect(() => node.getText(start, end)).toThrow();
  });

  it('should not throw an error if end index is equal to start index', () => {
    const start = 5;
    const end = 5;

    expect(() => node.getText(start, end)).not.toThrow();
  });
});
describe('.removeText()', () => {
it('should remove all text by default', () => {
node.removeText();
expect(node.getText()).toEqual('');
});
it('should remove text from specified index', () => {
const start = 3;
node.removeText(start);
expect(node.getText()).toEqual(initialText.slice(0, start));
});
it('should remove text from 0 to specified end index', () => {
const end = 8;
node.removeText(0, end);
expect(node.getText()).toEqual(initialText.slice(end));
});
it('should remove text from specified start and end indecies', () => {
const start = 3;
const end = 8;
node.removeText(start, end);
expect(node.getText()).toEqual(initialText.slice(0, start) + initialText.slice(end));
});
it('should call remove() method if node is empty after removeText() call', () => {
jest.spyOn(node, 'remove');
node.removeText();
| expect(node.remove).toBeCalled(); |
});
});
describe('.format()', () => {
  it('should return just one FormattingNode, if formatting full TextNode', () => {
    const name = createInlineToolName('bold');

    const fragments = node.format(name, 0, initialText.length);

    expect(fragments).toHaveLength(1);
    expect(fragments[0]).toBeInstanceOf(FormattingNode);
  });

  it('should return two fragments if formatting from the start, but not to the end', () => {
    const name = createInlineToolName('bold');
    const end = 5;

    const fragments = node.format(name, 0, end);

    expect(fragments).toHaveLength(2);
    expect(fragments[0]).toBeInstanceOf(FormattingNode);
    expect(fragments[1]).toBeInstanceOf(TextNode);
  });

  it('should return two fragments if formatting to the end, but not from the start', () => {
    const name = createInlineToolName('bold');
    const start = 5;

    const fragments = node.format(name, start, initialText.length);

    expect(fragments).toHaveLength(2);
    expect(fragments[0]).toBeInstanceOf(TextNode);
    expect(fragments[1]).toBeInstanceOf(FormattingNode);
  });

  it('should return three fragments if formatting in the middle', () => {
    const name = createInlineToolName('bold');
    const start = 5;
    const end = 8;

    const fragments = node.format(name, start, end);

    // eslint-disable-next-line @typescript-eslint/no-magic-numbers
    expect(fragments).toHaveLength(3);
    expect(fragments[0]).toBeInstanceOf(TextNode);
    expect(fragments[1]).toBeInstanceOf(FormattingNode);
    expect(fragments[2]).toBeInstanceOf(TextNode);
  });

  it('should return FormattingNode with a TextNode as a child with correct text value', () => {
    const name = createInlineToolName('bold');
    const start = 5;
    const end = 8;

    const fragments = node.format(name, start, end);
    const formattingNode = fragments[1] as FormattingNode;

    expect(formattingNode.children[0].getText()).toEqual(initialText.slice(start, end));
  });

  it('should call parent\'s insertAfter with new nodes', () => {
    const name = createInlineToolName('bold');
    const start = 5;
    const end = 8;

    const fragments = node.format(name, start, end);

    // toHaveBeenCalledWith is the non-deprecated form of Jest's toBeCalledWith alias
    expect(parentMock.insertAfter).toHaveBeenCalledWith(node, ...fragments);
  });
});
describe('.split()', () => {
  const index = 5;

  it('should not split (return null) if index is 0', () => {
    const newNode = node.split(0);

    expect(newNode).toBeNull();
  });

  it('should not split (return null) if index equals text length', () => {
    const newNode = node.split(initialText.length);

    expect(newNode).toBeNull();
  });

  it('should create new TextNode on split', () => {
    const newNode = node.split(index);

    expect(newNode).toBeInstanceOf(TextNode);
  });

  it('should create new TextNode with text value splitted from the original one', () => {
    const newNode = node.split(index);

    expect(newNode?.getText()).toEqual(initialText.slice(index));
  });

  it('should remove split text value from the original node', () => {
    node.split(index);

    expect(node.getText()).toEqual(initialText.slice(0, index));
  });

  it('should insert new node to the parent', () => {
    const newNode = node.split(index);

    // toHaveBeenCalledWith is the non-deprecated form of Jest's toBeCalledWith alias
    expect(parentMock.insertAfter).toHaveBeenCalledWith(node, newNode);
  });
});
describe('.serialized', () => {
it('should return text value and empty array of fragments', () => {
const result = node.serialized;
expect(result).toEqual({
text: initialText,
fragments: [],
});
});
});
});
| src/entities/TextNode/TextNode.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " it('should remove all text by default', () => {\n node.removeText();\n expect(childMock.removeText).toBeCalledWith(0, childMock.length);\n expect(anotherChildMock.removeText).toBeCalledWith(0, anotherChildMock.length);\n });\n it('should call remove() if length is 0 after removeText() call', () => {\n const removeSpy = jest.spyOn(node, 'remove');\n const lengthSpy = jest.spyOn(node, 'length', 'get').mockImplementation(() => 0);\n node.removeText();\n expect(removeSpy).toBeCalled();",
"score": 69.32672480765221
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " });\n });\n describe('.removeText()', () => {\n const start = 3;\n const end = 5;\n it('should remove text from appropriate child', () => {\n node.removeText(start, end);\n expect(childMock.removeText).toBeCalledWith(start, end);\n });\n it('should adjust indices by child offset', () => {",
"score": 65.43582266685557
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " const offset = childMock.length;\n node.removeText(offset + start, offset + end);\n expect(anotherChildMock.removeText).toBeCalledWith(start, end);\n });\n it('should call removeText for each affected child', () => {\n const offset = childMock.length;\n node.removeText(start, offset + end);\n expect(childMock.removeText).toBeCalledWith(start, offset);\n expect(anotherChildMock.removeText).toBeCalledWith(0, end);\n });",
"score": 59.522927393902044
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " removeSpy.mockRestore();\n lengthSpy.mockRestore();\n });\n });\n describe('.getText()', () => {\n const start = 3;\n const end = 5;\n it('should call getText() for the relevant child', () => {\n node.getText(start, end);\n expect(childMock.getText).toBeCalledWith(start, end);",
"score": 48.575540399255175
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " });\n it('should adjust index by child offset', () => {\n const offset = childMock.length;\n node.getText(offset + start, offset + end);\n expect(anotherChildMock.getText).toBeCalledWith(start, end);\n });\n it('should call getText for all relevant children', () => {\n const offset = childMock.length;\n node.getText(start, offset + end);\n expect(childMock.getText).toBeCalledWith(start, offset);",
"score": 45.19118642371069
}
] | typescript | expect(node.remove).toBeCalled(); |
import {
FormattingNodeConstructorParameters,
InlineToolName,
InlineToolData
} from './types';
import { ChildNode, InlineFragment, InlineNode, InlineNodeSerialized, ParentNode } from '../interfaces';
export * from './types';
/**
 * We need to extend FormattingNode interface with ChildNode and ParentNode ones to use the methods from mixins
 */
export interface FormattingNode extends ChildNode, ParentNode {}

/**
 * FormattingNode class represents a node in a tree-like structure, used to store and manipulate formatted text content
 *
 * Child/parent behavior (children, append, remove, insertAfter, parent) comes from the
 * ParentNode and ChildNode mixin decorators applied below.
 */
@ParentNode
@ChildNode
export class FormattingNode implements InlineNode {
  /**
   * Private field representing the name of the formatting tool applied to the content
   */
  #tool: InlineToolName;

  /**
   * Any additional data associated with the formatting tool
   */
  #data?: InlineToolData;

  /**
   * Constructor for FormattingNode class.
   *
   * @param args - FormattingNode constructor arguments.
   * @param args.tool - The name of the formatting tool applied to the content.
   * @param args.data - Any additional data associated with the formatting.
   */
  constructor({ tool, data }: FormattingNodeConstructorParameters) {
    this.#tool = tool;
    this.#data = data;
  }

  /**
   * Returns text value length of current node (including subtree)
   */
  public get length(): number {
    return this.children.reduce((sum, child) => sum + child.length, 0);
  }

  /**
   * Returns serialized value of the node: text and formatting fragments
   */
  public get serialized(): InlineNodeSerialized {
    return {
      text: this.getText(),
      fragments: this.getFragments(),
    };
  }

  /**
   * Inserts text to the specified index, by default appends text to the end of the current value
   *
   * NOTE(review): if no child contains the index (e.g. the node has no children),
   * the optional chaining below makes this call a silent no-op — confirm that is intended.
   *
   * @param text - text to insert
   * @param [index] - char index where to insert text
   */
  public insertText(text: string, index = this.length): void {
    // offset is the absolute start index of the found child; the child expects a local index
    const [child, offset] = this.#findChildByIndex(index);

    child?.insertText(text, index - offset);
  }

  /**
   * Removes text form the specified range
   *
   * @param [start] - start char index of the range, by default 0
   * @param [end] - end char index of the range, by default length of the text value
   * @returns {string} removed text
   */
  public removeText(start = 0, end = this.length): string {
    const result = this.#reduceChildrenInRange(
      start,
      end,
      (acc, child, childStart, childEnd) => {
        return acc + child.removeText(childStart, childEnd);
      },
      ''
    );

    // A formatting node with no remaining text is useless — detach it from its parent
    if (this.length === 0) {
      this.remove();
    }

    return result;
  }

  /**
   * Returns text from the specified range
   *
   * @param [start] - start char index of the range, by default 0
   * @param [end] - end char index of the range, by default length of the text value
   */
  public getText(start = 0, end = this.length): string {
    return this.#reduceChildrenInRange(
      start,
      end,
      (acc, child, childStart, childEnd) => {
        return acc + child.getText(childStart, childEnd);
      },
      ''
    );
  }

  /**
   * Returns inline fragments for subtree including current node from the specified range
   *
   * The accumulator is seeded with this node's own fragment covering [start, end].
   *
   * NOTE(review): nested fragments are pushed with ranges returned by the child
   * (child-local coordinates) without adding the child's offset within this node —
   * confirm callers expect child-relative ranges here.
   *
   * @param [start] - start char index of the range, by default 0
   * @param [end] - end char index of the range, by default length of the text value
   */
  public getFragments(start = 0, end = this.length): InlineFragment[] {
    return this.#reduceChildrenInRange<InlineFragment[]>(
      start,
      end,
      (acc, child, childStart, childEnd) => {
        /**
         * If child is not a FormattingNode, it doesn't include any fragments. So we skip it.
         */
        if (!(child instanceof FormattingNode)) {
          return acc;
        }

        acc.push(...child.getFragments(childStart, childEnd));

        return acc;
      },
      [ {
        tool: this.#tool,
        data: this.#data,
        range: [start, end],
      } ]
    );
  }

  /**
   * Splits current node by the specified index
   *
   * The tail children (from the split point onwards) are moved into a new sibling
   * FormattingNode carrying the same tool and data, which is inserted after this node.
   *
   * @param index - char index where to split the node
   * @returns {FormattingNode | null} new node
   */
  public split(index: number): FormattingNode | null {
    // Splitting at either boundary would produce an empty node — nothing to do
    if (index === 0 || index === this.length) {
      return null;
    }

    const newNode = new FormattingNode({
      tool: this.#tool,
      data: this.#data,
    });
    const [child, offset] = this.#findChildByIndex(index);

    if (!child) {
      return null;
    }

    // Have to save length as it is changed after split
    const childLength = child.length;
    const splitNode = child.split(index - offset);
    let midNodeIndex = this.children.indexOf(child);

    /**
     * If node is split or if node is not split but index equals to child length, we should split children from the next node
     */
    if (splitNode || (index - offset === childLength)) {
      midNodeIndex += 1;
    }

    newNode.append(...this.children.slice(midNodeIndex));

    this.parent?.insertAfter(this, newNode);

    return newNode;
  }

  /**
   * Applies formatting to the text with specified inline tool in the specified range
   *
   * @param tool - name of inline tool to apply
   * @param start - char start index of the range
   * @param end - char end index of the range
   * @param [data] - inline tool data if applicable
   */
  public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {
    /**
     * In case current tool is the same as new one, do nothing
     */
    if (tool === this.#tool) {
      return [];
    }

    return this.#reduceChildrenInRange<InlineNode[]>(
      start,
      end,
      (acc, child, childStart, childEnd) => {
        acc.push(...child.format(tool, childStart, childEnd, data));

        return acc;
      },
      []
    );
  }

  /**
   * Iterates through children in range and calls callback for each
   *
   * On every step start/end are shifted down by the child's length, so the callback
   * always receives indices local to the current child, clamped to its bounds.
   *
   * @param start - range start char index
   * @param end - range end char index
   * @param callback - callback to apply on children
   * @param initialValue - initial accumulator value
   * @private
   */
  #reduceChildrenInRange<Acc>(
    start: number,
    end: number,
    callback: (acc: Acc, child: InlineNode, start: number, end: number) => Acc,
    initialValue: Acc
  ): Acc {
    let result = initialValue;

    for (const child of this.children) {
      // Child overlaps the remaining [start, end) window — visit it with clamped local indices
      if (start < child.length && end > 0 && start < end) {
        result = callback(result, child, Math.max(start, 0), Math.min(child.length, end));
      }

      start -= child.length;
      end -= child.length;
    }

    return result;
  }

  /**
   * Returns child by passed text index
   *
   * Because the comparison is `<=`, an index that falls exactly on a boundary
   * between two children resolves to the earlier child.
   *
   * @param index - char index
   * @private
   */
  #findChildByIndex(index: number): [child: InlineNode & ChildNode | null, offset: number] {
    let totalLength = 0;

    for (const child of this.children) {
      if (index <= child.length + totalLength) {
        return [child, totalLength];
      }

      totalLength += child.length;
    }

    // No child contains the index (e.g. empty node); offset equals total text length
    return [null, totalLength];
  }
}
| src/entities/FormattingNode/index.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/TextNode/TextNode.spec.ts",
"retrieved_chunk": " });\n describe('.split()', () => {\n const index = 5;\n it('should not split (return null) if index is 0', () => {\n const newNode = node.split(0);\n expect(newNode).toBeNull();\n });\n it('should not split (return null) if index equals text length', () => {\n const newNode = node.split(initialText.length);\n expect(newNode).toBeNull();",
"score": 50.86840038268646
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " it('should not split (return null) if index equals text length', () => {\n const newNode = node.split(node.length);\n expect(newNode).toBeNull();\n });\n it('should create new FormattingNode on split', () => {\n const newNode = node.split(index);\n expect(newNode).toBeInstanceOf(FormattingNode);\n });\n /**\n * @todo check this and related cases with integration tests",
"score": 45.93311537929346
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " */\n it('should create new FormattingNode with children split from the original one', () => {\n const newNode = node.split(childMock.length);\n expect(newNode?.children).toEqual([ anotherChildMock ]);\n });\n it('should call split method of child containing the specified index', () => {\n node.split(index);\n expect(childMock.split).toBeCalledWith(index);\n });\n it('should insert new node to the parent', () => {",
"score": 45.83639608415784
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " },\n ]);\n });\n });\n describe('.split()', () => {\n const index = 5;\n it('should not split (return null) if index is 0', () => {\n const newNode = node.split(0);\n expect(newNode).toBeNull();\n });",
"score": 44.275619703966385
},
{
"filename": "src/entities/TextNode/index.ts",
"retrieved_chunk": " /**\n * Splits current node into two nodes by the specified index\n *\n * @param index - char index where to split\n * @returns {TextNode|null} - new node or null if split is not applicable\n */\n public split(index: number): TextNode | null {\n if (index === 0 || index === this.length) {\n return null;\n }",
"score": 43.2547310983667
}
] | typescript | .append(...this.children.slice(midNodeIndex)); |
import { describe, it, expect, beforeEach, jest } from '@jest/globals';
import { TextNode } from './index';
import { createInlineToolName, FormattingNode } from '../FormattingNode';
import type { ParentNode } from '../interfaces';
describe('TextNode', () => {
const initialText = 'initial text';
const text = 'some text';
const parentMock = {
insertAfter: jest.fn(),
removeChild: jest.fn(),
append: jest.fn(),
children: [],
} as unknown as ParentNode;
let node: TextNode;
beforeEach(() => {
node = new TextNode({
value: initialText,
parent: parentMock as FormattingNode,
});
});
it('should have empty value by default', () => {
node = new TextNode();
expect(node.getText()).toEqual('');
});
describe('.insertText()', () => {
it('should set text to value if node is empty', () => {
node = new TextNode();
node.insertText(text);
expect(node.getText()).toEqual(text);
});
it('should append text if not empty', () => {
node.insertText(text);
expect(node.getText()).toEqual(initialText + text);
});
it('should prepend text if index is 0 and node is not empty', () => {
node.insertText(text, 0);
expect(node.getText()).toEqual(text + initialText);
});
it('should insert text at index if not empty', () => {
const index = 5;
node.insertText(text, index);
expect(node.getText()).toEqual(initialText.slice(0, index) + text + initialText.slice(index));
});
it('should throw an error if index is less than 0', () => {
const f = (): void => node.insertText(text, -1);
expect(f).toThrowError();
});
it('should throw an error if index is greater than node length', () => {
const f = (): void => node.insertText(text, initialText.length + 1);
expect(f).toThrowError();
});
});
describe('.getText()', () => {
it('should return sliced value if start provided', () => {
const start = 5;
expect(node.getText(start)).toEqual(initialText.slice(start));
});
it('should return sliced value if end provided', () => {
const end = 6;
expect(node.getText(0, end)).toEqual(initialText.slice(0, end));
});
it('should return sliced value if full range provided', () => {
const start = 3;
const end = 9;
expect(node.getText(start, end)).toEqual(initialText.slice(start, end));
});
it('should throw an error if start is invalid index', () => {
expect(() => node.getText(-1)).toThrowError();
expect(() => node.getText(initialText.length + 1)).toThrowError();
});
it('should throw an error if end is invalid index', () => {
expect(() => node.getText(0, initialText.length + 1)).toThrowError();
});
it('should throw an error if end index is greater than start index', () => {
const start = 5;
const end = 3;
expect(() => node.getText(start, end)).toThrowError();
});
it('should not throw an error if end index is equal to start index', () => {
const start = 5;
const end = 5;
expect(() => node.getText(start, end)).not.toThrowError();
});
});
describe('.removeText()', () => {
it('should remove all text by default', () => {
node.removeText();
expect(node.getText()).toEqual('');
});
it('should remove text from specified index', () => {
const start = 3;
node.removeText(start);
expect(node.getText()).toEqual(initialText.slice(0, start));
});
it('should remove text from 0 to specified end index', () => {
const end = 8;
node.removeText(0, end);
expect(node.getText()).toEqual(initialText.slice(end));
});
it('should remove text from specified start and end indecies', () => {
const start = 3;
const end = 8;
node.removeText(start, end);
expect(node.getText()).toEqual(initialText.slice(0, start) + initialText.slice(end));
});
it('should call remove() method if node is empty after removeText() call', () => {
jest.spyOn(node, 'remove');
node.removeText();
expect(node.remove).toBeCalled();
});
});
describe('.format()', () => {
it('should return just one FormattingNode, if formatting full TextNode', () => {
const name = createInlineToolName('bold');
const | fragments = node.format(name, 0, initialText.length); |
expect(fragments).toHaveLength(1);
expect(fragments[0]).toBeInstanceOf(FormattingNode);
});
it('should return two fragments if formatting from the start, but not to the end', () => {
const name = createInlineToolName('bold');
const end = 5;
const fragments = node.format(name, 0, end);
expect(fragments).toHaveLength(2);
expect(fragments[0]).toBeInstanceOf(FormattingNode);
expect(fragments[1]).toBeInstanceOf(TextNode);
});
it('should return two fragments if formatting to the end, but not from the start', () => {
const name = createInlineToolName('bold');
const start = 5;
const fragments = node.format(name, start, initialText.length);
expect(fragments).toHaveLength(2);
expect(fragments[0]).toBeInstanceOf(TextNode);
expect(fragments[1]).toBeInstanceOf(FormattingNode);
});
it('should return three fragments if formatting in the middle', () => {
const name = createInlineToolName('bold');
const start = 5;
const end = 8;
const fragments = node.format(name, start, end);
// eslint-disable-next-line @typescript-eslint/no-magic-numbers
expect(fragments).toHaveLength(3);
expect(fragments[0]).toBeInstanceOf(TextNode);
expect(fragments[1]).toBeInstanceOf(FormattingNode);
expect(fragments[2]).toBeInstanceOf(TextNode);
});
it('should return FormattingNode with a TextNode as a child with correct text value', () => {
const name = createInlineToolName('bold');
const start = 5;
const end = 8;
const fragments = node.format(name, start, end);
const formattingNode = fragments[1] as FormattingNode;
expect(formattingNode.children[0].getText()).toEqual(initialText.slice(start, end));
});
it('should call parent\'s insertAfter with new nodes', () => {
const name = createInlineToolName('bold');
const start = 5;
const end = 8;
const fragments = node.format(name, start, end);
expect(parentMock.insertAfter).toBeCalledWith(node, ...fragments);
});
});
describe('.split()', () => {
const index = 5;
it('should not split (return null) if index is 0', () => {
const newNode = node.split(0);
expect(newNode).toBeNull();
});
it('should not split (return null) if index equals text length', () => {
const newNode = node.split(initialText.length);
expect(newNode).toBeNull();
});
it('should create new TextNode on split', () => {
const newNode = node.split(index);
expect(newNode).toBeInstanceOf(TextNode);
});
it('should create new TextNode with text value splitted from the original one', () => {
const newNode = node.split(index);
expect(newNode?.getText()).toEqual(initialText.slice(index));
});
it('should remove split text value from the original node', () => {
node.split(index);
expect(node.getText()).toEqual(initialText.slice(0, index));
});
it('should insert new node to the parent', () => {
const newNode = node.split(index);
expect(parentMock.insertAfter).toBeCalledWith(node, newNode);
});
});
describe('.serialized', () => {
it('should return text value and empty array of fragments', () => {
const result = node.serialized;
expect(result).toEqual({
text: initialText,
fragments: [],
});
});
});
});
| src/entities/TextNode/TextNode.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " it('should remove all text by default', () => {\n node.removeText();\n expect(childMock.removeText).toBeCalledWith(0, childMock.length);\n expect(anotherChildMock.removeText).toBeCalledWith(0, anotherChildMock.length);\n });\n it('should call remove() if length is 0 after removeText() call', () => {\n const removeSpy = jest.spyOn(node, 'remove');\n const lengthSpy = jest.spyOn(node, 'length', 'get').mockImplementation(() => 0);\n node.removeText();\n expect(removeSpy).toBeCalled();",
"score": 37.96615762532811
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " expect(anotherChildMock.format).toBeCalledWith(anotherTool, 0, end, undefined);\n });\n it('should do nothing if same tool is being applied', () => {\n node.format(tool, start, end);\n expect(childMock.format).not.toBeCalled();\n expect(anotherChildMock.format).not.toBeCalled();\n });\n it('should return empty array if same tool is being applied', () => {\n const result = node.format(tool, start, end);\n expect(result).toHaveLength(0);",
"score": 28.03460302606054
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": "const createChildMock = (value: string): TextNode => ({\n getText: jest.fn(() => value),\n appendTo: jest.fn(),\n insertText: jest.fn(),\n removeText: jest.fn(),\n split: jest.fn(() => null),\n format: jest.fn(() => [ new FormattingNode({ tool: createInlineToolName('tool') }) ]),\n length: value.length,\n} as unknown as TextNode);\ndescribe('FormattingNode', () => {",
"score": 27.525159933005774
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " const newNode = node.split(index);\n expect(parentMock.insertAfter).toBeCalledWith(node, newNode);\n });\n });\n describe('.format()', () => {\n const start = 3;\n const end = 5;\n it('should apply formatting to the relevant child', () => {\n node.format(anotherTool, start, end);\n expect(childMock.format).toBeCalledWith(anotherTool, start, end, undefined);",
"score": 24.031237838446202
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " });\n it('should return array of new formatting nodes', () => {\n const result = node.format(anotherTool, start, end);\n expect(result).toEqual(childMock.format(anotherTool, start, end));\n });\n });\n});",
"score": 22.605341340680592
}
] | typescript | fragments = node.format(name, 0, initialText.length); |
import { describe, it, expect, beforeEach } from '@jest/globals';
import { TextNode } from './index';
import { createInlineToolName, FormattingNode } from '../FormattingNode';
import type { ParentNode } from '../interfaces';
describe('TextNode', () => {
const initialText = 'initial text';
const text = 'some text';
const parentMock = {
insertAfter: jest.fn(),
removeChild: jest.fn(),
append: jest.fn(),
children: [],
} as unknown as ParentNode;
let node: TextNode;
beforeEach(() => {
node = new TextNode({
value: initialText,
parent: parentMock as FormattingNode,
});
});
it('should have empty value by default', () => {
node = new TextNode();
expect(node.getText()).toEqual('');
});
describe('.insertText()', () => {
it('should set text to value if node is empty', () => {
node = new TextNode();
node.insertText(text);
expect(node.getText()).toEqual(text);
});
it('should append text if not empty', () => {
node.insertText(text);
expect(node.getText()).toEqual(initialText + text);
});
it('should prepend text if index is 0 and node is not empty', () => {
node.insertText(text, 0);
expect(node.getText()).toEqual(text + initialText);
});
it('should insert text at index if not empty', () => {
const index = 5;
node.insertText(text, index);
expect(node.getText()).toEqual(initialText.slice(0, index) + text + initialText.slice(index));
});
it('should throw an error if index is less than 0', () => {
const f = (): void => node.insertText(text, -1);
expect(f).toThrowError();
});
it('should throw an error if index is greater than node length', () => {
const f = (): void => node.insertText(text, initialText.length + 1);
expect(f).toThrowError();
});
});
describe('.getText()', () => {
it('should return sliced value if start provided', () => {
const start = 5;
expect(node.getText(start)).toEqual(initialText.slice(start));
});
it('should return sliced value if end provided', () => {
const end = 6;
expect(node.getText(0, end)).toEqual(initialText.slice(0, end));
});
it('should return sliced value if full range provided', () => {
const start = 3;
const end = 9;
expect(node.getText(start, end)).toEqual(initialText.slice(start, end));
});
it('should throw an error if start is invalid index', () => {
expect(() => node.getText(-1)).toThrowError();
expect(() => node.getText(initialText.length + 1)).toThrowError();
});
it('should throw an error if end is invalid index', () => {
expect(() => node.getText(0, initialText.length + 1)).toThrowError();
});
it('should throw an error if end index is greater than start index', () => {
const start = 5;
const end = 3;
expect(() => node.getText(start, end)).toThrowError();
});
it('should not throw an error if end index is equal to start index', () => {
const start = 5;
const end = 5;
expect(() => node.getText(start, end)).not.toThrowError();
});
});
describe('.removeText()', () => {
it('should remove all text by default', () => {
node.removeText();
expect(node.getText()).toEqual('');
});
it('should remove text from specified index', () => {
const start = 3;
node.removeText(start);
expect(node.getText()).toEqual(initialText.slice(0, start));
});
it('should remove text from 0 to specified end index', () => {
const end = 8;
node.removeText(0, end);
expect(node.getText()).toEqual(initialText.slice(end));
});
it('should remove text from specified start and end indecies', () => {
const start = 3;
const end = 8;
node.removeText(start, end);
expect(node.getText()).toEqual(initialText.slice(0, start) + initialText.slice(end));
});
it('should call remove() method if node is empty after removeText() call', () => {
| jest.spyOn(node, 'remove'); |
node.removeText();
expect(node.remove).toBeCalled();
});
});
describe('.format()', () => {
it('should return just one FormattingNode, if formatting full TextNode', () => {
const name = createInlineToolName('bold');
const fragments = node.format(name, 0, initialText.length);
expect(fragments).toHaveLength(1);
expect(fragments[0]).toBeInstanceOf(FormattingNode);
});
it('should return two fragments if formatting from the start, but not to the end', () => {
const name = createInlineToolName('bold');
const end = 5;
const fragments = node.format(name, 0, end);
expect(fragments).toHaveLength(2);
expect(fragments[0]).toBeInstanceOf(FormattingNode);
expect(fragments[1]).toBeInstanceOf(TextNode);
});
it('should return two fragments if formatting to the end, but not from the start', () => {
const name = createInlineToolName('bold');
const start = 5;
const fragments = node.format(name, start, initialText.length);
expect(fragments).toHaveLength(2);
expect(fragments[0]).toBeInstanceOf(TextNode);
expect(fragments[1]).toBeInstanceOf(FormattingNode);
});
it('should return three fragments if formatting in the middle', () => {
const name = createInlineToolName('bold');
const start = 5;
const end = 8;
const fragments = node.format(name, start, end);
// eslint-disable-next-line @typescript-eslint/no-magic-numbers
expect(fragments).toHaveLength(3);
expect(fragments[0]).toBeInstanceOf(TextNode);
expect(fragments[1]).toBeInstanceOf(FormattingNode);
expect(fragments[2]).toBeInstanceOf(TextNode);
});
it('should return FormattingNode with a TextNode as a child with correct text value', () => {
const name = createInlineToolName('bold');
const start = 5;
const end = 8;
const fragments = node.format(name, start, end);
const formattingNode = fragments[1] as FormattingNode;
expect(formattingNode.children[0].getText()).toEqual(initialText.slice(start, end));
});
it('should call parent\'s insertAfter with new nodes', () => {
const name = createInlineToolName('bold');
const start = 5;
const end = 8;
const fragments = node.format(name, start, end);
expect(parentMock.insertAfter).toBeCalledWith(node, ...fragments);
});
});
describe('.split()', () => {
const index = 5;
it('should not split (return null) if index is 0', () => {
const newNode = node.split(0);
expect(newNode).toBeNull();
});
it('should not split (return null) if index equals text length', () => {
const newNode = node.split(initialText.length);
expect(newNode).toBeNull();
});
it('should create new TextNode on split', () => {
const newNode = node.split(index);
expect(newNode).toBeInstanceOf(TextNode);
});
it('should create new TextNode with text value splitted from the original one', () => {
const newNode = node.split(index);
expect(newNode?.getText()).toEqual(initialText.slice(index));
});
it('should remove split text value from the original node', () => {
node.split(index);
expect(node.getText()).toEqual(initialText.slice(0, index));
});
it('should insert new node to the parent', () => {
const newNode = node.split(index);
expect(parentMock.insertAfter).toBeCalledWith(node, newNode);
});
});
describe('.serialized', () => {
it('should return text value and empty array of fragments', () => {
const result = node.serialized;
expect(result).toEqual({
text: initialText,
fragments: [],
});
});
});
});
| src/entities/TextNode/TextNode.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " });\n });\n describe('.removeText()', () => {\n const start = 3;\n const end = 5;\n it('should remove text from appropriate child', () => {\n node.removeText(start, end);\n expect(childMock.removeText).toBeCalledWith(start, end);\n });\n it('should adjust indices by child offset', () => {",
"score": 59.673461427143145
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " it('should remove all text by default', () => {\n node.removeText();\n expect(childMock.removeText).toBeCalledWith(0, childMock.length);\n expect(anotherChildMock.removeText).toBeCalledWith(0, anotherChildMock.length);\n });\n it('should call remove() if length is 0 after removeText() call', () => {\n const removeSpy = jest.spyOn(node, 'remove');\n const lengthSpy = jest.spyOn(node, 'length', 'get').mockImplementation(() => 0);\n node.removeText();\n expect(removeSpy).toBeCalled();",
"score": 56.45756159475618
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " removeSpy.mockRestore();\n lengthSpy.mockRestore();\n });\n });\n describe('.getText()', () => {\n const start = 3;\n const end = 5;\n it('should call getText() for the relevant child', () => {\n node.getText(start, end);\n expect(childMock.getText).toBeCalledWith(start, end);",
"score": 55.29471124601697
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " const offset = childMock.length;\n node.removeText(offset + start, offset + end);\n expect(anotherChildMock.removeText).toBeCalledWith(start, end);\n });\n it('should call removeText for each affected child', () => {\n const offset = childMock.length;\n node.removeText(start, offset + end);\n expect(childMock.removeText).toBeCalledWith(start, offset);\n expect(anotherChildMock.removeText).toBeCalledWith(0, end);\n });",
"score": 54.8268514731959
},
{
"filename": "src/entities/FormattingNode/FormattingNode.spec.ts",
"retrieved_chunk": " });\n it('should adjust index by child offset', () => {\n const offset = childMock.length;\n node.getText(offset + start, offset + end);\n expect(anotherChildMock.getText).toBeCalledWith(start, end);\n });\n it('should call getText for all relevant children', () => {\n const offset = childMock.length;\n node.getText(start, offset + end);\n expect(childMock.getText).toBeCalledWith(start, offset);",
"score": 51.223425026332094
}
] | typescript | jest.spyOn(node, 'remove'); |
import fs from 'fs/promises';
import inquirer from 'inquirer';
import pQueue from 'p-queue';
import { setTimeout as delay } from 'timers/promises';
import { Injectable, Logger } from '@nestjs/common';
import { Account as IAccount } from '../../interfaces/account.interface';
import { Secrets } from '../../interfaces/secrets.interface';
class Account implements IAccount {
public readonly username: string;
public readonly password: string;
public sharedSecret: string | null = null;
public identitySecret: string | null = null;
constructor(account: string) {
account = account.trim();
if (account.length === 0) throw new Error('Invalid account');
const parts = account.split(':').map((part) => part.trim());
if (parts.length < 2) throw new Error('Invalid account');
const [username, password, sharedSecret, identitySecret] = parts;
this.username = username;
this.password = password;
if (sharedSecret) this.sharedSecret = sharedSecret;
if (identitySecret) this.identitySecret = identitySecret;
}
}
@Injectable()
export class AccountsImportService {
private readonly logger = new Logger(AccountsImportService.name);
private readonly readFilesQueue = new pQueue({ concurrency: 100 });
public async loadAccounts(input: string[] | string) {
if (!input) return [];
if (!Array.isArray(input)) input = [input];
if (input.length === 0) return [];
let accounts: Account[] = [];
const errors: string[] = [];
const readResults = await Promise.all(input.map((input) => this.readAccountsFromInput(input)));
for (const result of readResults) {
accounts.push(...result.values);
errors.push(...result.errors);
}
accounts = this.removeDuplicates(accounts);
if (errors.length > 0 && accounts.length > 0) {
this.logger.warn(`The following account sources are invalid:\n${errors.join('\n')}`);
await delay(1000);
const { confirm } = await inquirer.prompt({
type: 'confirm',
name: 'confirm',
message: 'Continue with the valid accounts?',
default: false,
});
if (!confirm) throw new Error('Aborted by user');
}
return accounts;
}
public assignSecretsToAccounts( | accounts: Account[], secrets: Secrets[]) { |
const secretsMap = new Map<string, Secrets>();
for (const secret of secrets) {
secretsMap.set(secret.username, secret);
// some existing steam-oriented apps are case-insensitive to usernames in secrets
secretsMap.set(secret.username.toLowerCase(), secret);
}
for (const account of accounts) {
let secret = secretsMap.get(account.username);
if (!secret) secret = secretsMap.get(account.username.toLowerCase());
if (!secret) continue;
account.sharedSecret = secret.sharedSecret;
account.identitySecret = secret.identitySecret;
}
}
private removeDuplicates(accounts: Account[]) {
const map = new Map<string, Account>();
for (const account of accounts) map.set(account.username, account);
return [...map.values()];
}
private async readAccountsFromInput(input: string) {
const inputType = await this.inferInputType(input);
if (inputType === 'file') return this.readAccountsFromFile(input);
if (inputType === 'string') return this.readAccountFromString(input);
if (inputType === 'directory') return { values: [], errors: [input] };
}
private async readAccountsFromFile(filePath: string) {
const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };
try {
let content = await this.readFilesQueue.add(() => fs.readFile(filePath, 'utf-8'));
content = content.trim();
if (content.length === 0) throw new Error('Empty file');
// session file
if (filePath.endsWith('.steamsession')) {
const readResults = this.readAccountFromSessionFile(content);
result.values.push(...readResults.values);
if (readResults.errors.length > 0) result.errors.push(filePath);
return result;
}
// asf json
if (filePath.endsWith('.json') && content.includes('"SteamLogin"')) {
const readResults = this.readAccountFromAsfJson(content);
result.values.push(...readResults.values);
if (readResults.errors.length > 0) result.errors.push(filePath);
return result;
}
// plain text
if (content.includes(':')) {
const lines = content
.split(/\s+|\r?\n/)
.map((l) => l.trim())
.filter((l) => l.length > 0);
if (lines.length === 0) throw new Error('Empty file');
for (const line of lines) {
const readResults = this.readAccountFromString(line);
result.values.push(...readResults.values);
if (readResults.errors.length > 0) result.errors.push(line);
}
return result;
}
throw new Error('Unsupported file format');
} catch (error) {
result.errors.push(filePath);
}
return result;
}
private readAccountFromString(str: string) {
const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };
try {
const account = new Account(str);
result.values.push(account);
} catch (error) {
result.errors.push(str);
}
return result;
}
private readAccountFromAsfJson(fileContent: string) {
const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };
try {
const { SteamLogin: username, SteamPassword: password } = JSON.parse(fileContent);
if (!username) throw new Error('Invalid username');
if (!password) throw new Error('Invalid password');
const account = new Account(`${username}:${password}`);
result.values.push(account);
} catch (error) {
result.errors.push(fileContent);
}
return result;
}
private readAccountFromSessionFile(fileContent: string) {
const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };
try {
const { Username, Password, SharedSecret, IdentitySecret } = JSON.parse(fileContent);
if (!Username) throw new Error('Invalid username');
if (!Password) throw new Error('Invalid password');
const account = new Account(`${Username}:${Password}:${SharedSecret || ''}:${IdentitySecret || ''}`);
result.values.push(account);
} catch (error) {
result.errors.push(fileContent);
}
return result;
}
private async inferInputType(input: string) {
if (typeof input !== 'string') throw new Error(`Invalid input type: '${typeof input}'`);
try {
const stats = await fs.stat(input);
if (stats.isFile()) return 'file';
if (stats.isDirectory()) return 'directory';
} catch (error) {
return 'string';
}
}
}
| src/modules/accounts-import/accounts-import.service.ts | Sadzurami-steam-sessions-creator-97b7294 | [
{
"filename": "src/modules/secrets-import/secrets-import.service.ts",
"retrieved_chunk": " default: false,\n });\n if (!confirm) throw new Error('Aborted by user');\n }\n return secrets;\n }\n private removeDuplicates(secrets: Secrets[]) {\n const map = new Map<string, Secrets>();\n for (const secret of secrets) map.set(secret.username, secret);\n return [...map.values()];",
"score": 34.075726747485724
},
{
"filename": "src/modules/sessions-import/sessions-import.service.ts",
"retrieved_chunk": " }\n sessions = this.removeDuplicates(sessions);\n if (errors.length > 0 && sessions.length > 0) {\n this.logger.warn(`The following session sources are invalid:\\n${errors.join('\\n')}`);\n await delay(1000);\n const { confirm } = await inquirer.prompt({\n type: 'confirm',\n name: 'confirm',\n message: 'Continue with the valid sessions?',\n default: false,",
"score": 31.25311309055457
},
{
"filename": "src/modules/proxies-import/proxies-import.service.ts",
"retrieved_chunk": " proxies = this.removeDuplicates(proxies);\n if (errors.length > 0) {\n this.logger.warn(`The following proxy sources are invalid:\\n${errors.join('\\n')}`);\n await delay(1000);\n const { confirm } = await inquirer.prompt({\n type: 'confirm',\n name: 'confirm',\n message: proxies.length > 0 ? `Continue with ${proxies.length} valid proxies ?` : 'Continue without proxies ?',\n default: false,\n });",
"score": 28.683128940802185
},
{
"filename": "src/modules/secrets-import/secrets-import.service.ts",
"retrieved_chunk": " }\n secrets = this.removeDuplicates(secrets);\n if (errors.length > 0) {\n this.logger.warn(`The following secret sources are invalid:\\n${errors.join('\\n')}`);\n await delay(1000);\n const { confirm } = await inquirer.prompt({\n type: 'confirm',\n name: 'confirm',\n message:\n secrets.length > 0 ? `Continue with ${secrets.length} valid secrets?` : 'Continue without any secrets?',",
"score": 27.740762023008287
},
{
"filename": "src/modules/sessions-import/sessions-import.service.ts",
"retrieved_chunk": " });\n if (!confirm) throw new Error('Aborted by user');\n }\n return sessions;\n }\n private removeDuplicates(sessions: Session[]) {\n const map = new Map<string, Session>();\n for (const session of sessions) map.set(session.username, session);\n return [...map.values()];\n }",
"score": 21.802717708582037
}
] | typescript | accounts: Account[], secrets: Secrets[]) { |
import { FormattingNode, InlineToolName, InlineToolData } from '../FormattingNode';
import { TextNodeConstructorParameters } from './types';
import { ChildNode, InlineNode, InlineNodeSerialized } from '../interfaces';
export * from './types';
export interface TextNode extends ChildNode {}
/**
* TextNode class represents a node in a tree-like structure, used to store and manipulate text content.
*/
@ChildNode
export class TextNode implements InlineNode {
/**
* Private field representing the text content of the node
*/
#value: string;
/**
* Constructor for TextNode class
*
* @param args - TextNode constructor arguments.
* @param args.value - Text content of the node.
*/
constructor({ value = '' }: TextNodeConstructorParameters = {}) {
this.#value = value;
}
/**
* Returns length of the text
*/
public get length(): number {
return this.#value.length;
}
/**
* Returns serialized value of the node
*/
public get serialized(): InlineNodeSerialized {
return {
text: this.getText(),
// No fragments for text node
fragments: [],
};
}
/**
* Inserts text to specified position. By default, appends new text to the current value
*
* @param text - text to insert
* @param [index] - char start index
*/
public insertText(text: string, index = this.length): void {
this.#validateIndex(index);
this.#value = this.#value.slice(0, index) + text + this.#value.slice(index);
}
/**
* Remove text from specified range
*
* @param [start] - start char index of the range, 0 by default
* @param [end] - end char index of the range, text length by default
* @returns {string} removed text
*/
public removeText(start = 0, end = this.length): string {
this.#validateIndex(start);
this.#validateIndex(end);
const removedValue = this.#value.slice(start, end);
this.#value = this.#value.slice(0, start) + this.#value.slice(end);
if (this.length === 0) {
this.remove();
}
return removedValue;
}
/**
* Returns text value from the specified range
*
* @param [start] - start char index of the range, 0 by default
* @param [end] - end char index of the range, text length by default
*/
public getText(start = 0, end = this.length): string {
if (start > end) {
// Stryker disable next-line StringLiteral
throw new Error(`Start index ${start} should be less or equal than end index ${end}`);
}
this.#validateIndex(start);
this.#validateIndex(end);
return this.#value.slice(start, end);
}
/**
* Applies inline tool for specified range
*
* @param tool - name of the tool to apply
* @param start - start char index of the range
* @param end - end char index of the range
* @param [data] - inline tool data if applicable
* @returns {InlineNode[]} - array of nodes after applied formatting
*/
public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {
this.#validateIndex(start);
this.#validateIndex(end);
const formattingNode = new FormattingNode({
tool,
data,
});
const fragments: ChildNode[] = [];
/**
* If start index is greater than 0, we need extract part of the text before the start index
*/
if (start > 0) {
fragments.push(this.#cloneContents(0, start));
}
/**
* Formatting is applied to the specified range
*/
const formattedFragment = this.#cloneContents(start, end);
| formattedFragment.appendTo(formattingNode); |
fragments.push(formattingNode);
/**
* If end index is less than the text length, we need to extract part of the text after the end index
*/
if (end < this.length) {
fragments.push(this.#cloneContents(end, this.length));
}
this.parent?.insertAfter(this, ...fragments);
this.remove();
return fragments;
}
/**
* Splits current node into two nodes by the specified index
*
* @param index - char index where to split
* @returns {TextNode|null} - new node or null if split is not applicable
*/
public split(index: number): TextNode | null {
if (index === 0 || index === this.length) {
return null;
}
const newNode = new TextNode();
const text = this.removeText(index);
newNode.insertText(text);
this.parent?.insertAfter(this, newNode);
return newNode;
}
/**
* Validates index
*
* @param index - char index to validate
* @throws Error if index is out of the text length
*/
#validateIndex(index: number): void {
if (index < 0 || index > this.length) {
// Stryker disable next-line StringLiteral
throw new Error(`Index ${index} is not in valid range [0, ${this.length}]`);
}
}
/**
* Clones specified range to a new TextNode
*
* @param start - start char index of the range
* @param end - end char index of the range
*/
#cloneContents(start: number, end: number): TextNode {
return new TextNode({
value: this.getText(start, end),
});
}
}
| src/entities/TextNode/index.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/FormattingNode/index.ts",
"retrieved_chunk": " return result;\n }\n /**\n * Returns text from the specified range\n *\n * @param [start] - start char index of the range, by default 0\n * @param [end] - end char index of the range, by default length of the text value\n */\n public getText(start = 0, end = this.length): string {\n return this.#reduceChildrenInRange(",
"score": 37.1901535217018
},
{
"filename": "src/entities/FormattingNode/index.ts",
"retrieved_chunk": " }\n /**\n * Removes text form the specified range\n *\n * @param [start] - start char index of the range, by default 0\n * @param [end] - end char index of the range, by default length of the text value\n * @returns {string} removed text\n */\n public removeText(start = 0, end = this.length): string {\n const result = this.#reduceChildrenInRange(",
"score": 36.888167184998366
},
{
"filename": "src/entities/TextNode/TextNode.spec.ts",
"retrieved_chunk": " it('should throw an error if end is invalid index', () => {\n expect(() => node.getText(0, initialText.length + 1)).toThrowError();\n });\n it('should throw an error if end index is greater than start index', () => {\n const start = 5;\n const end = 3;\n expect(() => node.getText(start, end)).toThrowError();\n });\n it('should not throw an error if end index is equal to start index', () => {\n const start = 5;",
"score": 36.44221569844943
},
{
"filename": "src/entities/FormattingNode/index.ts",
"retrieved_chunk": " *\n * @param [start] - start char index of the range, by default 0\n * @param [end] - end char index of the range, by default length of the text value\n */\n public getFragments(start = 0, end = this.length): InlineFragment[] {\n return this.#reduceChildrenInRange<InlineFragment[]>(\n start,\n end,\n (acc, child, childStart, childEnd) => {\n /**",
"score": 35.4368463683271
},
{
"filename": "src/entities/TextNode/TextNode.spec.ts",
"retrieved_chunk": " const start = 3;\n node.removeText(start);\n expect(node.getText()).toEqual(initialText.slice(0, start));\n });\n it('should remove text from 0 to specified end index', () => {\n const end = 8;\n node.removeText(0, end);\n expect(node.getText()).toEqual(initialText.slice(end));\n });\n it('should remove text from specified start and end indecies', () => {",
"score": 31.135742093079166
}
] | typescript | formattedFragment.appendTo(formattingNode); |
import { FormattingNode, InlineToolName, InlineToolData } from '../FormattingNode';
import { TextNodeConstructorParameters } from './types';
import { ChildNode, InlineNode, InlineNodeSerialized } from '../interfaces';
export * from './types';
export interface TextNode extends ChildNode {}
/**
* TextNode class represents a node in a tree-like structure, used to store and manipulate text content.
*/
@ChildNode
export class TextNode implements InlineNode {
/**
* Private field representing the text content of the node
*/
#value: string;
/**
* Constructor for TextNode class
*
* @param args - TextNode constructor arguments.
* @param args.value - Text content of the node.
*/
constructor({ value = '' }: TextNodeConstructorParameters = {}) {
this.#value = value;
}
/**
* Returns length of the text
*/
public get length(): number {
return this.#value.length;
}
/**
* Returns serialized value of the node
*/
public get serialized(): InlineNodeSerialized {
return {
text: this.getText(),
// No fragments for text node
fragments: [],
};
}
/**
* Inserts text to specified position. By default, appends new text to the current value
*
* @param text - text to insert
* @param [index] - char start index
*/
public insertText(text: string, index = this.length): void {
this.#validateIndex(index);
this.#value = this.#value.slice(0, index) + text + this.#value.slice(index);
}
/**
* Remove text from specified range
*
* @param [start] - start char index of the range, 0 by default
* @param [end] - end char index of the range, text length by default
* @returns {string} removed text
*/
public removeText(start = 0, end = this.length): string {
this.#validateIndex(start);
this.#validateIndex(end);
const removedValue = this.#value.slice(start, end);
this.#value = this.#value.slice(0, start) + this.#value.slice(end);
if (this.length === 0) {
this.remove();
}
return removedValue;
}
/**
* Returns text value from the specified range
*
* @param [start] - start char index of the range, 0 by default
* @param [end] - end char index of the range, text length by default
*/
public getText(start = 0, end = this.length): string {
if (start > end) {
// Stryker disable next-line StringLiteral
throw new Error(`Start index ${start} should be less or equal than end index ${end}`);
}
this.#validateIndex(start);
this.#validateIndex(end);
return this.#value.slice(start, end);
}
/**
* Applies inline tool for specified range
*
* @param tool - name of the tool to apply
* @param start - start char index of the range
* @param end - end char index of the range
* @param [data] - inline tool data if applicable
* @returns {InlineNode[]} - array of nodes after applied formatting
*/
public format(tool: InlineToolName, start: number, end: number, data?: InlineToolData): InlineNode[] {
this.#validateIndex(start);
this.#validateIndex(end);
const formattingNode = new FormattingNode({
tool,
data,
});
const fragments: ChildNode[] = [];
/**
* If start index is greater than 0, we need extract part of the text before the start index
*/
if (start > 0) {
fragments.push(this.#cloneContents(0, start));
}
/**
* Formatting is applied to the specified range
*/
const formattedFragment = this.#cloneContents(start, end);
formattedFragment.appendTo(formattingNode);
| fragments.push(formattingNode); |
/**
* If end index is less than the text length, we need to extract part of the text after the end index
*/
if (end < this.length) {
fragments.push(this.#cloneContents(end, this.length));
}
this.parent?.insertAfter(this, ...fragments);
this.remove();
return fragments;
}
/**
* Splits current node into two nodes by the specified index
*
* @param index - char index where to split
* @returns {TextNode|null} - new node or null if split is not applicable
*/
public split(index: number): TextNode | null {
if (index === 0 || index === this.length) {
return null;
}
const newNode = new TextNode();
const text = this.removeText(index);
newNode.insertText(text);
this.parent?.insertAfter(this, newNode);
return newNode;
}
/**
* Validates index
*
* @param index - char index to validate
* @throws Error if index is out of the text length
*/
#validateIndex(index: number): void {
if (index < 0 || index > this.length) {
// Stryker disable next-line StringLiteral
throw new Error(`Index ${index} is not in valid range [0, ${this.length}]`);
}
}
/**
* Clones specified range to a new TextNode
*
* @param start - start char index of the range
* @param end - end char index of the range
*/
#cloneContents(start: number, end: number): TextNode {
return new TextNode({
value: this.getText(start, end),
});
}
}
| src/entities/TextNode/index.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/TextNode/TextNode.spec.ts",
"retrieved_chunk": " const formattingNode = fragments[1] as FormattingNode;\n expect(formattingNode.children[0].getText()).toEqual(initialText.slice(start, end));\n });\n it('should call parent\\'s insertAfter with new nodes', () => {\n const name = createInlineToolName('bold');\n const start = 5;\n const end = 8;\n const fragments = node.format(name, start, end);\n expect(parentMock.insertAfter).toBeCalledWith(node, ...fragments);\n });",
"score": 33.06591277112542
},
{
"filename": "src/entities/TextNode/TextNode.spec.ts",
"retrieved_chunk": " const end = 5;\n const fragments = node.format(name, 0, end);\n expect(fragments).toHaveLength(2);\n expect(fragments[0]).toBeInstanceOf(FormattingNode);\n expect(fragments[1]).toBeInstanceOf(TextNode);\n });\n it('should return two fragments if formatting to the end, but not from the start', () => {\n const name = createInlineToolName('bold');\n const start = 5;\n const fragments = node.format(name, start, initialText.length);",
"score": 25.24650785170278
},
{
"filename": "src/entities/FormattingNode/index.ts",
"retrieved_chunk": " return result;\n }\n /**\n * Returns text from the specified range\n *\n * @param [start] - start char index of the range, by default 0\n * @param [end] - end char index of the range, by default length of the text value\n */\n public getText(start = 0, end = this.length): string {\n return this.#reduceChildrenInRange(",
"score": 23.490025082411492
},
{
"filename": "src/entities/FormattingNode/index.ts",
"retrieved_chunk": " }\n /**\n * Removes text form the specified range\n *\n * @param [start] - start char index of the range, by default 0\n * @param [end] - end char index of the range, by default length of the text value\n * @returns {string} removed text\n */\n public removeText(start = 0, end = this.length): string {\n const result = this.#reduceChildrenInRange(",
"score": 23.04668347809475
},
{
"filename": "src/entities/FormattingNode/index.ts",
"retrieved_chunk": " *\n * @param [start] - start char index of the range, by default 0\n * @param [end] - end char index of the range, by default length of the text value\n */\n public getFragments(start = 0, end = this.length): InlineFragment[] {\n return this.#reduceChildrenInRange<InlineFragment[]>(\n start,\n end,\n (acc, child, childStart, childEnd) => {\n /**",
"score": 21.920004428957007
}
] | typescript | fragments.push(formattingNode); |
import { describe, it, expect, beforeEach, jest } from '@jest/globals';
import { ChildNode } from './ChildNode';
import type { ParentNode } from './ParentNode';
const parentMock = {
append: jest.fn(),
removeChild: jest.fn(),
insertAfter: jest.fn(),
children: [],
} as unknown as ParentNode;
interface Dummy extends ChildNode {
}
/**
* Dummy Node's class
*/
@ChildNode
class Dummy {
/**
*
* @param _options - dummy options
*/
// eslint-disable-next-line @typescript-eslint/no-empty-function,@typescript-eslint/no-unused-vars,no-unused-vars
constructor(_options?: unknown) {}
}
describe('ChildNode decorator', () => {
let dummy: Dummy;
beforeEach(() => {
jest.resetAllMocks();
});
it('should decorated class to a parent', () => {
dummy = new Dummy({ parent: parentMock });
expect(parentMock.append).toBeCalledWith(dummy);
});
it('should add remove method to the decorated class', () => {
expect | (dummy.remove).toBeInstanceOf(Function); |
});
it('should add appendTo method to the decorated class', () => {
expect(dummy.appendTo).toBeInstanceOf(Function);
});
describe('.parent', () => {
it('should return null by default', () => {
dummy = new Dummy();
expect(dummy.parent).toBeNull();
});
it('should return parent passed via constructor', () => {
dummy = new Dummy({ parent: parentMock });
expect(dummy.parent).toEqual(parentMock);
});
});
describe('.remove()', () => {
beforeEach(() => {
dummy = new Dummy({
parent: parentMock,
});
});
it('should call parent\'s removeChild method', () => {
dummy.remove();
expect(parentMock.removeChild).toBeCalledWith(dummy);
});
it('should set node\'s parent to null', () => {
dummy.remove();
expect(dummy.parent).toBeNull();
});
});
describe('.appendTo()', () => {
beforeEach(() => {
dummy = new Dummy();
});
it('should call parent\'s append method on appendTo call', () => {
dummy.appendTo(parentMock);
expect(parentMock.append).toBeCalledWith(dummy);
});
it('should set node\'s parent on appendTo call', () => {
dummy.appendTo(parentMock);
expect(dummy.parent).toBe(parentMock);
});
it('should do nothing if parents are the same', () => {
const dummyWithParent = new Dummy({
parent: parentMock,
});
jest.resetAllMocks();
dummyWithParent.appendTo(parentMock);
expect(parentMock.append).not.toBeCalled();
});
});
});
| src/entities/interfaces/ChildNode.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/interfaces/ParentNode.spec.ts",
"retrieved_chunk": " it('should add removeChild method to the decorated class', () => {\n expect(dummy.removeChild).toBeInstanceOf(Function);\n });\n it('should add append method to the decorated class', () => {\n expect(dummy.append).toBeInstanceOf(Function);\n });\n it('should add insertAfter method to the decorated class', () => {\n expect(dummy.insertAfter).toBeInstanceOf(Function);\n });\n describe('constructor', () => {",
"score": 68.82111913497218
},
{
"filename": "src/entities/interfaces/ParentNode.spec.ts",
"retrieved_chunk": " dummy = new Dummy({\n children: [ childMock ],\n });\n dummy.removeChild(childMock);\n expect(dummy.children).toHaveLength(0);\n });\n it('should call remove method of child', () => {\n const childMock = createChildMock();\n dummy = new Dummy({\n children: [ childMock ],",
"score": 34.02311732631689
},
{
"filename": "src/entities/interfaces/ParentNode.spec.ts",
"retrieved_chunk": " it('should append passed children to new parent', () => {\n const childMock = createChildMock();\n dummy = new Dummy({\n children: [ childMock ],\n });\n expect(childMock.appendTo).toBeCalledWith(dummy);\n });\n });\n describe('.children', () => {\n it('should return empty array by default', () => {",
"score": 32.04255239045607
},
{
"filename": "src/entities/interfaces/ParentNode.spec.ts",
"retrieved_chunk": " describe('.append()', () => {\n it('should add child to the children array', () => {\n const childMock = createChildMock();\n dummy.append(childMock);\n expect(dummy.children).toContain(childMock);\n });\n it('should add several children to the children array', () => {\n const childMock = createChildMock();\n const anotherChildMock = createChildMock();\n dummy.append(childMock, anotherChildMock);",
"score": 27.95392520793191
},
{
"filename": "src/entities/interfaces/ParentNode.spec.ts",
"retrieved_chunk": " */\n // eslint-disable-next-line @typescript-eslint/no-empty-function,@typescript-eslint/no-unused-vars,no-unused-vars\n constructor(_options?: unknown) {}\n}\ndescribe('ParentNode decorator', () => {\n let dummy: Dummy;\n beforeEach(() => {\n dummy = new Dummy();\n jest.resetAllMocks();\n });",
"score": 27.62508290948353
}
] | typescript | (dummy.remove).toBeInstanceOf(Function); |
import glob from 'fast-glob';
import { CliUtilityService, Command, CommandRunner, Help, Option } from 'nest-commander';
import pQueue from 'p-queue';
import path from 'path';
import { setTimeout as delay } from 'timers/promises';
import { Logger } from '@nestjs/common';
import { AccountsImportService } from '../../modules/accounts-import/accounts-import.service';
import { ExportSessionsService } from '../../modules/export-sessions/export-sessions.service';
import { ProxiesImportService } from '../../modules/proxies-import/proxies-import.service';
import { ProxiesService } from '../../modules/proxies/proxies.service';
import { SecretsImportService } from '../../modules/secrets-import/secrets-import.service';
import { CreateSessionsService } from './create-sessions.service';
/**
 * Options parsed from the `create` command line.
 */
interface CreateCommandOptions {
  /** Account source(s): file paths/globs or raw account strings, resolved and loaded via AccountsImportService. */
  accounts: string | string[];
  /** Secret source(s) loaded via SecretsImportService and later assigned to matching accounts. */
  secrets: string | string[];
  /** Proxy source(s) loaded via ProxiesImportService; session creation runs through these proxies. */
  proxies: string | string[];
  /** Max parallel session creations; when falsy and proxies exist, a default of min(proxies * 3, 100) is used. */
  concurrency: number;
  /** Output directory for exported sessions; required. */
  output: string;
  /** When false, accounts that already have a session file in the output directory are skipped. */
  overwrite: boolean;
}
@Command({
name: 'create',
description: 'Creates new sessions',
})
export class CreateSessionsCommand extends CommandRunner {
  // Logger scoped to this command's class name for prefixed CLI output.
  private readonly logger = new Logger(CreateSessionsCommand.name);

  /**
   * Wires up the services this command orchestrates: importing
   * accounts/secrets/proxies, creating sessions, and exporting them.
   * All dependencies are injected by Nest and kept as readonly fields.
   */
  constructor(
    private readonly createSessionsService: CreateSessionsService,
    private readonly exportSessionsService: ExportSessionsService,
    private readonly accountsImportService: AccountsImportService,
    private readonly secretsImportService: SecretsImportService,
    private readonly proxiesImportService: ProxiesImportService,
    private readonly proxiesService: ProxiesService,
  ) {
    // CommandRunner base class requires the super call before field use.
    super();
  }
public async run(args: string[], options: CreateCommandOptions) {
try {
const accountsOptionInput = await this.normalizeInput(options.accounts);
let accounts = await this.accountsImportService.loadAccounts(accountsOptionInput);
if (accounts.length === 0) throw new Error('No accounts found');
this.logger.log(`Accounts: ${accounts.length}`);
const secretsOptionInput = await this.normalizeInput(options.secrets);
| const secrets = await this.secretsImportService.loadSecrets(secretsOptionInput); |
this.logger.log(`Secrets: ${secrets.length}`);
const outputOptionInput = options.output;
if (!outputOptionInput) throw new Error('Output path is required');
const output = path.resolve(outputOptionInput);
await this.exportSessionsService.setOutputPath(output);
this.logger.log(`Output: ${output}`);
const overwriteExistingSessions = options.overwrite;
if (!overwriteExistingSessions) {
const sessionsPaths = await this.normalizeInput(`${output}/*`);
const existingSessions = await this.accountsImportService.loadAccounts(sessionsPaths);
this.logger.log(`Ignoring existing sessions: ${existingSessions.length}`);
accounts = accounts.filter((account) => !existingSessions.some((a) => a.username === account.username));
}
if (accounts.length === 0) {
this.logger.log('No accounts to create');
return;
}
const proxiesOptionInput = await this.normalizeInput(options.proxies);
const proxies = await this.proxiesImportService.loadProxies(proxiesOptionInput);
this.proxiesService.setProxies(proxies);
this.logger.log(`Proxies: ${proxies.length}`);
const concurrencyOptionInput = options.concurrency;
const concurrency = proxies.length > 0 ? concurrencyOptionInput || Math.min(proxies.length * 3, 100) : 1;
this.logger.log(`Concurrency: ${concurrency}`);
this.logger.log(`Starting to create sessions for ${accounts.length} accounts`);
this.accountsImportService.assignSecretsToAccounts(accounts, secrets);
let success = 0;
let fails = 0;
let left = accounts.length;
const queue = new pQueue({ concurrency, interval: 10, intervalCap: 1 });
for (const account of accounts) {
queue.add(async () => {
try {
const session = await this.createSessionsService.createSession(account);
await this.exportSessionsService.exportSession(session);
success++;
this.logger.log(`Success: ${account.username}, left: ${--left}`);
} catch (error) {
fails++;
this.logger.warn(`Fail: ${account.username}, left: ${--left}`);
}
});
}
await queue.onIdle();
this.logger.log(`Session creation complete`);
this.logger.log(`Success: ${success}`);
this.logger.log(`Fails: ${fails}`);
await delay(1000);
} catch (error) {
this.logger.error(error.message);
}
}
private async normalizeInput(input: string | string[]) {
if (!input) return [];
if (!Array.isArray(input)) input = [input];
const filteredInput = input.filter((el) => typeof el === 'string' && el.trim() !== '');
if (filteredInput.length === 0) return [];
const nestedData = await Promise.all(
filteredInput.map(async (el) => {
el = el.trim();
// Possible glob pattern
const files = await glob(el);
if (files.length > 0) return files;
// Possible string
return el.split(/\s+|\r?\n/).map((line) => line.trim());
}),
);
return nestedData.flat();
}
@Option({
required: true,
flags: '-a, --accounts <accounts...>',
description: `Specify one or more accounts.
Account can be specified as:
- A simple string.
- A file path to load accounts from (one account per line).
- A glob pattern to load accounts from multiple files.
Supported formats:
- username:password
- username:password:sharedSecret
- username:password:sharedSecret:identitySecret
- ASF json`,
})
private parseAccountsOption(val: string, accumulator: string[] = []) {
accumulator.push(val);
return accumulator;
}
@Option({
flags: '-s, --secrets <secrets...>',
description: `Specify one or more secrets.
Secret can be specified as:
- A file path to load secrets from file.
- A glob pattern to load secrets from multiple files.
Supported formats:
- maFile
- ASF db`,
})
private parseSecretsOption(val: string, accumulator: string[] = []) {
accumulator.push(val);
return accumulator;
}
@Option({
flags: '-p, --proxies <proxies...>',
description: `Specify one or more proxies.
Proxy can be specified as:
- A string in the format <protocol>://<username>:<password>@<host>:<port>.
- A file path to load proxies from a text file.
Supported protocols:
- http
- https`,
})
private parseProxiesOption(val: string, accumulator: string[] = []) {
accumulator.push(val);
return accumulator;
}
@Option({
flags: '-c, --concurrency <concurrency>',
description: `Specify the number of concurrent runs.
Default: 1, or the number of proxies.`,
})
private parseConcurrencyOption(val: string) {
const parsed = parseInt(val, 10);
if (Number.isNaN(parsed)) throw new Error('Concurrency must be a number');
if (parsed < 1) throw new Error('Concurrency must be greater than 0');
return parsed;
}
@Option({
flags: '-o, --output <output>',
description: 'Specify the output directory.',
defaultValue: './sessions',
})
private parseOutputOption(val: string) {
return val;
}
@Option({
flags: '--overwrite (-w)',
description: 'Overwrite existing sessions.',
defaultValue: false,
})
private parseOverwriteOption(val: string) {
return new CliUtilityService().parseBoolean(val);
}
@Help('afterAll')
private displayExamples() {
return `
Examples:
create -a accounts.txt -s ./secrets -p proxies.txt
create -a username:password -p proxies.txt`;
}
}
| src/commands/create/create-sessions.command.ts | Sadzurami-steam-sessions-creator-97b7294 | [
{
"filename": "src/commands/validate/validate-sessions.command.ts",
"retrieved_chunk": " public async run(args: string[], options: ValidateCommandOptions) {\n try {\n const sessionsOptionInput = await this.normalizeInput(options.sessions);\n const sessions = await this.sessionsImportService.loadSessions(sessionsOptionInput);\n if (sessions.length === 0) throw new Error('No sessions found');\n this.logger.log(`Sessions: ${sessions.length}`);\n await this.validateSessionsService.validateSessions(sessions);\n } catch (error) {\n this.logger.error(error.message);\n }",
"score": 63.570447005299
},
{
"filename": "src/modules/accounts-import/accounts-import.service.ts",
"retrieved_chunk": " if (!Array.isArray(input)) input = [input];\n if (input.length === 0) return [];\n let accounts: Account[] = [];\n const errors: string[] = [];\n const readResults = await Promise.all(input.map((input) => this.readAccountsFromInput(input)));\n for (const result of readResults) {\n accounts.push(...result.values);\n errors.push(...result.errors);\n }\n accounts = this.removeDuplicates(accounts);",
"score": 38.40253080573962
},
{
"filename": "src/modules/accounts-import/accounts-import.service.ts",
"retrieved_chunk": " if (errors.length > 0 && accounts.length > 0) {\n this.logger.warn(`The following account sources are invalid:\\n${errors.join('\\n')}`);\n await delay(1000);\n const { confirm } = await inquirer.prompt({\n type: 'confirm',\n name: 'confirm',\n message: 'Continue with the valid accounts?',\n default: false,\n });\n if (!confirm) throw new Error('Aborted by user');",
"score": 34.82776932770594
},
{
"filename": "src/modules/accounts-import/accounts-import.service.ts",
"retrieved_chunk": " }\n return accounts;\n }\n public assignSecretsToAccounts(accounts: Account[], secrets: Secrets[]) {\n const secretsMap = new Map<string, Secrets>();\n for (const secret of secrets) {\n secretsMap.set(secret.username, secret);\n // some existing steam-oriented apps are case-insensitive to usernames in secrets\n secretsMap.set(secret.username.toLowerCase(), secret);\n }",
"score": 30.54442878420899
},
{
"filename": "src/modules/secrets-import/secrets-import.service.ts",
"retrieved_chunk": " public async loadSecrets(input: string[] | string) {\n if (!input) return [];\n if (!Array.isArray(input)) input = [input];\n if (input.length === 0) return [];\n let secrets: Secrets[] = [];\n const errors: string[] = [];\n const readResults = await Promise.all(input.map((input) => this.readSecretsFromInput(input)));\n for (const result of readResults) {\n secrets.push(...result.values);\n errors.push(...result.errors);",
"score": 26.541761046402787
}
] | typescript | const secrets = await this.secretsImportService.loadSecrets(secretsOptionInput); |
import { describe, it, expect, beforeEach, jest } from '@jest/globals';
import { ParentNode } from './ParentNode';
import type { ChildNode } from './ChildNode';
/**
 * Builds a stub ChildNode whose mutating methods are jest mocks, so tests
 * can assert how a parent interacts with its children.
 */
const createChildMock = (): ChildNode => {
  const stub = {
    parent: null,
    appendTo: jest.fn(),
    remove: jest.fn(),
  };

  return stub as unknown as ChildNode;
};
// The interface declares the mixin members; the @ParentNode decorator is
// expected to install them at runtime (verified by the tests below).
interface Dummy extends ParentNode {
}

/**
 * Minimal host class for exercising the ParentNode decorator.
 */
@ParentNode
class Dummy {
  /**
   * @param _options - dummy options, consumed by the decorator's constructor wrapping
   */
  // eslint-disable-next-line @typescript-eslint/no-empty-function,@typescript-eslint/no-unused-vars,no-unused-vars
  constructor(_options?: unknown) {}
}
// Unit tests for the ParentNode decorator: mixin installation, children
// bookkeeping, and child lifecycle hooks (appendTo/remove on the child mocks).
describe('ParentNode decorator', () => {
  let dummy: Dummy;

  beforeEach(() => {
    dummy = new Dummy();
    jest.resetAllMocks();
  });

  it('should add removeChild method to the decorated class', () => {
    expect(dummy.removeChild).toBeInstanceOf(Function);
  });

  it('should add append method to the decorated class', () => {
    expect(dummy.append).toBeInstanceOf(Function);
  });

  it('should add insertAfter method to the decorated class', () => {
    expect(dummy.insertAfter).toBeInstanceOf(Function);
  });

  describe('constructor', () => {
    it('should append passed children to new parent', () => {
      const childMock = createChildMock();

      dummy = new Dummy({
        children: [ childMock ],
      });

      expect(childMock.appendTo).toBeCalledWith(dummy);
    });
  });

  describe('.children', () => {
    it('should return empty array by default', () => {
      expect(dummy.children).toEqual([]);
    });

    it('should return children passed via constructor', () => {
      const childMock = createChildMock();

      dummy = new Dummy({
        children: [ childMock ],
      });

      expect(dummy.children).toEqual([ childMock ]);
    });
  });

  // append() keeps children ordered; re-appending moves a child to the end.
  describe('.append()', () => {
    it('should add child to the children array', () => {
      const childMock = createChildMock();

      dummy.append(childMock);

      expect(dummy.children).toContain(childMock);
    });

    it('should add several children to the children array', () => {
      const childMock = createChildMock();
      const anotherChildMock = createChildMock();

      dummy.append(childMock, anotherChildMock);

      expect(dummy.children).toEqual([childMock, anotherChildMock]);
    });

    it('should move a child to the end of children array if it is already there', () => {
      const childMock = createChildMock();
      const anotherChildMock = createChildMock();
      const oneMoreChildMock = createChildMock();

      dummy = new Dummy({
        children: [childMock, anotherChildMock, oneMoreChildMock],
      });

      dummy.append(anotherChildMock);

      expect(dummy.children).toEqual([childMock, oneMoreChildMock, anotherChildMock]);
    });

    it('should preserve already existing children', () => {
      const childMock = createChildMock();
      const anotherChildMock = createChildMock();
      const oneMoreChildMock = createChildMock();

      dummy = new Dummy({
        children: [childMock, anotherChildMock],
      });

      dummy.append(oneMoreChildMock);

      expect(dummy.children).toEqual([childMock, anotherChildMock, oneMoreChildMock]);
    });
  });

  // insertAfter() places children immediately after a target, relocating
  // any that were already present elsewhere in the array.
  describe('.insertAfter()', () => {
    it('should insert a child after passed target', () => {
      const childMock = createChildMock();
      const anotherChildMock = createChildMock();
      const childMockToInsert = createChildMock();

      dummy = new Dummy({
        children: [childMock, anotherChildMock],
      });

      dummy.insertAfter(childMock, childMockToInsert);

      expect(dummy.children).toEqual([childMock, childMockToInsert, anotherChildMock]);
    });

    it('should insert several children after passed target', () => {
      const childMock = createChildMock();
      const anotherChildMock = createChildMock();
      const childMockToInsert = createChildMock();
      const anotherChildMockToInsert = createChildMock();

      dummy = new Dummy({
        children: [childMock, anotherChildMock],
      });

      dummy.insertAfter(childMock, childMockToInsert, anotherChildMockToInsert);

      expect(dummy.children).toEqual([childMock, childMockToInsert, anotherChildMockToInsert, anotherChildMock]);
    });

    it('should remove existing child and insert it to the new place', () => {
      const childMock = createChildMock();
      const anotherChildMock = createChildMock();
      const oneMoreChildMock = createChildMock();
      const childMockToInsert = createChildMock();

      dummy = new Dummy({
        children: [childMock, anotherChildMock, oneMoreChildMock, childMockToInsert],
      });

      dummy.insertAfter(anotherChildMock, childMockToInsert);

      expect(dummy.children).toEqual([childMock, anotherChildMock, childMockToInsert, oneMoreChildMock]);
    });
  });

  // removeChild() detaches the child and delegates to the child's remove().
  describe('.removeChild()', () => {
    it('should remove child from the children array', () => {
      const childMock = createChildMock();

      dummy = new Dummy({
        children: [ childMock ],
      });

      dummy.removeChild(childMock);

      expect(dummy.children).toHaveLength(0);
    });

    it('should call remove method of child', () => {
      const childMock = createChildMock();

      dummy = new Dummy({
        children: [ childMock ],
      });

      dummy.removeChild(childMock);

      expect(childMock.remove).toBeCalled();
    });
  });
});
| src/entities/interfaces/ParentNode.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/interfaces/ChildNode.spec.ts",
"retrieved_chunk": " expect(parentMock.append).toBeCalledWith(dummy);\n });\n it('should add remove method to the decorated class', () => {\n expect(dummy.remove).toBeInstanceOf(Function);\n });\n it('should add appendTo method to the decorated class', () => {\n expect(dummy.appendTo).toBeInstanceOf(Function);\n });\n describe('.parent', () => {\n it('should return null by default', () => {",
"score": 83.80455522026364
},
{
"filename": "src/entities/interfaces/ChildNode.spec.ts",
"retrieved_chunk": " dummy = new Dummy({\n parent: parentMock,\n });\n });\n it('should call parent\\'s removeChild method', () => {\n dummy.remove();\n expect(parentMock.removeChild).toBeCalledWith(dummy);\n });\n it('should set node\\'s parent to null', () => {\n dummy.remove();",
"score": 47.70379036530604
},
{
"filename": "src/entities/interfaces/ChildNode.spec.ts",
"retrieved_chunk": " expect(dummy.parent).toBeNull();\n });\n });\n describe('.appendTo()', () => {\n beforeEach(() => {\n dummy = new Dummy();\n });\n it('should call parent\\'s append method on appendTo call', () => {\n dummy.appendTo(parentMock);\n expect(parentMock.append).toBeCalledWith(dummy);",
"score": 46.53012314705578
},
{
"filename": "src/entities/interfaces/ChildNode.spec.ts",
"retrieved_chunk": " // eslint-disable-next-line @typescript-eslint/no-empty-function,@typescript-eslint/no-unused-vars,no-unused-vars\n constructor(_options?: unknown) {}\n}\ndescribe('ChildNode decorator', () => {\n let dummy: Dummy;\n beforeEach(() => {\n jest.resetAllMocks();\n });\n it('should decorated class to a parent', () => {\n dummy = new Dummy({ parent: parentMock });",
"score": 43.32436648914171
},
{
"filename": "src/entities/interfaces/ChildNode.spec.ts",
"retrieved_chunk": " dummy = new Dummy();\n expect(dummy.parent).toBeNull();\n });\n it('should return parent passed via constructor', () => {\n dummy = new Dummy({ parent: parentMock });\n expect(dummy.parent).toEqual(parentMock);\n });\n });\n describe('.remove()', () => {\n beforeEach(() => {",
"score": 33.5308660156025
}
] | typescript | .append).toBeInstanceOf(Function); |
import fs from 'fs/promises';
import inquirer from 'inquirer';
import pQueue from 'p-queue';
import { setTimeout as delay } from 'timers/promises';
import { Injectable, Logger } from '@nestjs/common';
import { Account as IAccount } from '../../interfaces/account.interface';
import { Secrets } from '../../interfaces/secrets.interface';
/**
 * A single Steam account parsed from a colon-separated credentials string:
 * `username:password[:sharedSecret[:identitySecret]]`.
 *
 * Parts beyond the fourth are ignored; secrets default to null when absent.
 */
class Account implements IAccount {
  public readonly username: string;
  public readonly password: string;
  public sharedSecret: string | null = null;
  public identitySecret: string | null = null;

  /**
   * @param account - raw credentials string
   * @throws Error ('Invalid account') when the string is empty, has fewer
   * than two parts, or the username/password part is empty.
   */
  constructor(account: string) {
    account = account.trim();
    if (account.length === 0) throw new Error('Invalid account');

    const parts = account.split(':').map((part) => part.trim());
    if (parts.length < 2) throw new Error('Invalid account');

    const [username, password, sharedSecret, identitySecret] = parts;
    // Reject records like ':password' or 'username:' that would otherwise
    // silently produce unusable credentials.
    if (!username || !password) throw new Error('Invalid account');

    this.username = username;
    this.password = password;

    if (sharedSecret) this.sharedSecret = sharedSecret;
    if (identitySecret) this.identitySecret = identitySecret;
  }
}
/**
 * Imports Steam accounts from heterogeneous sources (raw strings, plain-text
 * files, ASF json, .steamsession files) and merges secrets into them.
 */
@Injectable()
export class AccountsImportService {
  private readonly logger = new Logger(AccountsImportService.name);
  // Caps parallel file reads so many inputs don't exhaust file descriptors.
  private readonly readFilesQueue = new pQueue({ concurrency: 100 });

  /**
   * Loads accounts from the given inputs, de-duplicates them by username,
   * and — when some sources were invalid but some accounts loaded — asks the
   * user whether to continue.
   *
   * @param input - one or more sources (raw account strings or file paths)
   * @returns unique accounts (possibly empty)
   * @throws Error ('Aborted by user') when the user declines the prompt
   */
  public async loadAccounts(input: string[] | string) {
    if (!input) return [];
    if (!Array.isArray(input)) input = [input];
    if (input.length === 0) return [];

    let accounts: Account[] = [];
    const errors: string[] = [];

    const readResults = await Promise.all(input.map((input) => this.readAccountsFromInput(input)));
    for (const result of readResults) {
      accounts.push(...result.values);
      errors.push(...result.errors);
    }

    accounts = this.removeDuplicates(accounts);

    // Only prompt when there is something valid to continue with; if nothing
    // loaded at all, the empty result is returned to the caller as-is.
    if (errors.length > 0 && accounts.length > 0) {
      this.logger.warn(`The following account sources are invalid:\n${errors.join('\n')}`);
      // Brief pause so the warning is visible before the interactive prompt.
      await delay(1000);

      const { confirm } = await inquirer.prompt({
        type: 'confirm',
        name: 'confirm',
        message: 'Continue with the valid accounts?',
        default: false,
      });
      if (!confirm) throw new Error('Aborted by user');
    }

    return accounts;
  }

  /**
   * Copies shared/identity secrets onto accounts, matched by username
   * (exact match first, then lowercase). Mutates the accounts in place.
   */
  public assignSecretsToAccounts(accounts: Account[], secrets: Secrets[]) {
    const secretsMap = new Map<string, Secrets>();
    for (const secret of secrets) {
      secretsMap.set(secret.username, secret);

      // some existing steam-oriented apps are case-insensitive to usernames in secrets
      secretsMap.set(secret.username.toLowerCase(), secret);
    }

    for (const account of accounts) {
      let secret = secretsMap.get(account.username);
      if (!secret) secret = secretsMap.get(account.username.toLowerCase());
      if (!secret) continue;

      account.sharedSecret = secret.sharedSecret;
      account.identitySecret = secret.identitySecret;
    }
  }

  // De-duplicates by username; the last occurrence wins.
  private removeDuplicates(accounts: Account[]) {
    const map = new Map<string, Account>();
    for (const account of accounts) map.set(account.username, account);

    return [...map.values()];
  }

  // Dispatches a single input to the appropriate reader; directories are
  // not supported and are reported as errors.
  private async readAccountsFromInput(input: string) {
    const inputType = await this.inferInputType(input);

    if (inputType === 'file') return this.readAccountsFromFile(input);
    if (inputType === 'string') return this.readAccountFromString(input);
    if (inputType === 'directory') return { values: [], errors: [input] };
  }

  /**
   * Reads accounts from a file, sniffing the format in order:
   * .steamsession file, ASF json, then colon-separated plain text.
   * Never throws; failures are collected into `errors`.
   */
  private async readAccountsFromFile(filePath: string) {
    const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };

    try {
      let content = await this.readFilesQueue.add(() => fs.readFile(filePath, 'utf-8'));
      content = content.trim();
      if (content.length === 0) throw new Error('Empty file');

      // session file
      if (filePath.endsWith('.steamsession')) {
        const readResults = this.readAccountFromSessionFile(content);

        result.values.push(...readResults.values);
        if (readResults.errors.length > 0) result.errors.push(filePath);

        return result;
      }

      // asf json
      if (filePath.endsWith('.json') && content.includes('"SteamLogin"')) {
        const readResults = this.readAccountFromAsfJson(content);

        result.values.push(...readResults.values);
        if (readResults.errors.length > 0) result.errors.push(filePath);

        return result;
      }

      // plain text
      if (content.includes(':')) {
        const lines = content
          .split(/\s+|\r?\n/)
          .map((l) => l.trim())
          .filter((l) => l.length > 0);
        if (lines.length === 0) throw new Error('Empty file');

        for (const line of lines) {
          const readResults = this.readAccountFromString(line);

          result.values.push(...readResults.values);
          if (readResults.errors.length > 0) result.errors.push(line);
        }

        return result;
      }

      throw new Error('Unsupported file format');
    } catch (error) {
      result.errors.push(filePath);
    }

    return result;
  }

  // Parses a single colon-separated account string; errors are collected.
  private readAccountFromString(str: string) {
    const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };

    try {
      const account = new Account(str);
      result.values.push(account);
    } catch (error) {
      result.errors.push(str);
    }

    return result;
  }

  // Parses an ASF bot config (SteamLogin/SteamPassword keys).
  private readAccountFromAsfJson(fileContent: string) {
    const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };

    try {
      const { SteamLogin: username, SteamPassword: password } = JSON.parse(fileContent);
      if (!username) throw new Error('Invalid username');
      if (!password) throw new Error('Invalid password');

      const account = new Account(`${username}:${password}`);
      result.values.push(account);
    } catch (error) {
      result.errors.push(fileContent);
    }

    return result;
  }

  // Parses a .steamsession file (Username/Password plus optional secrets).
  private readAccountFromSessionFile(fileContent: string) {
    const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };

    try {
      const { Username, Password, SharedSecret, IdentitySecret } = JSON.parse(fileContent);
      if (!Username) throw new Error('Invalid username');
      if (!Password) throw new Error('Invalid password');

      const account = new Account(`${Username}:${Password}:${SharedSecret || ''}:${IdentitySecret || ''}`);
      result.values.push(account);
    } catch (error) {
      result.errors.push(fileContent);
    }

    return result;
  }

  // Classifies an input as 'file' / 'directory' when it stats successfully,
  // otherwise treats it as a raw account string.
  private async inferInputType(input: string) {
    if (typeof input !== 'string') throw new Error(`Invalid input type: '${typeof input}'`);

    try {
      const stats = await fs.stat(input);

      if (stats.isFile()) return 'file';
      if (stats.isDirectory()) return 'directory';
    } catch (error) {
      return 'string';
    }
  }
}
| src/modules/accounts-import/accounts-import.service.ts | Sadzurami-steam-sessions-creator-97b7294 | [
{
"filename": "src/modules/sessions-import/sessions-import.service.ts",
"retrieved_chunk": " }\n sessions = this.removeDuplicates(sessions);\n if (errors.length > 0 && sessions.length > 0) {\n this.logger.warn(`The following session sources are invalid:\\n${errors.join('\\n')}`);\n await delay(1000);\n const { confirm } = await inquirer.prompt({\n type: 'confirm',\n name: 'confirm',\n message: 'Continue with the valid sessions?',\n default: false,",
"score": 38.52542090897718
},
{
"filename": "src/modules/secrets-import/secrets-import.service.ts",
"retrieved_chunk": " default: false,\n });\n if (!confirm) throw new Error('Aborted by user');\n }\n return secrets;\n }\n private removeDuplicates(secrets: Secrets[]) {\n const map = new Map<string, Secrets>();\n for (const secret of secrets) map.set(secret.username, secret);\n return [...map.values()];",
"score": 37.03628278251486
},
{
"filename": "src/modules/proxies-import/proxies-import.service.ts",
"retrieved_chunk": " proxies = this.removeDuplicates(proxies);\n if (errors.length > 0) {\n this.logger.warn(`The following proxy sources are invalid:\\n${errors.join('\\n')}`);\n await delay(1000);\n const { confirm } = await inquirer.prompt({\n type: 'confirm',\n name: 'confirm',\n message: proxies.length > 0 ? `Continue with ${proxies.length} valid proxies ?` : 'Continue without proxies ?',\n default: false,\n });",
"score": 35.721944773148806
},
{
"filename": "src/modules/secrets-import/secrets-import.service.ts",
"retrieved_chunk": " }\n secrets = this.removeDuplicates(secrets);\n if (errors.length > 0) {\n this.logger.warn(`The following secret sources are invalid:\\n${errors.join('\\n')}`);\n await delay(1000);\n const { confirm } = await inquirer.prompt({\n type: 'confirm',\n name: 'confirm',\n message:\n secrets.length > 0 ? `Continue with ${secrets.length} valid secrets?` : 'Continue without any secrets?',",
"score": 34.8364307408703
},
{
"filename": "src/modules/sessions-import/sessions-import.service.ts",
"retrieved_chunk": " });\n if (!confirm) throw new Error('Aborted by user');\n }\n return sessions;\n }\n private removeDuplicates(sessions: Session[]) {\n const map = new Map<string, Session>();\n for (const session of sessions) map.set(session.username, session);\n return [...map.values()];\n }",
"score": 24.84004557434082
}
] | typescript | public assignSecretsToAccounts(accounts: Account[], secrets: Secrets[]) { |
import { describe, it, expect, beforeEach, jest } from '@jest/globals';
import { ParentNode } from './ParentNode';
import type { ChildNode } from './ChildNode';
/**
 * Builds a stub ChildNode whose mutating methods are jest mocks, so tests
 * can assert how a parent interacts with its children.
 */
const createChildMock = (): ChildNode => {
  const stub = {
    parent: null,
    appendTo: jest.fn(),
    remove: jest.fn(),
  };

  return stub as unknown as ChildNode;
};
// The interface declares the mixin members; the @ParentNode decorator is
// expected to install them at runtime (verified by the tests below).
interface Dummy extends ParentNode {
}

/**
 * Minimal host class for exercising the ParentNode decorator.
 */
@ParentNode
class Dummy {
  /**
   * @param _options - dummy options, consumed by the decorator's constructor wrapping
   */
  // eslint-disable-next-line @typescript-eslint/no-empty-function,@typescript-eslint/no-unused-vars,no-unused-vars
  constructor(_options?: unknown) {}
}
// Unit tests for the ParentNode decorator: mixin installation, children
// bookkeeping, and child lifecycle hooks (appendTo/remove on the child mocks).
describe('ParentNode decorator', () => {
  let dummy: Dummy;

  beforeEach(() => {
    dummy = new Dummy();
    jest.resetAllMocks();
  });

  it('should add removeChild method to the decorated class', () => {
    expect(dummy.removeChild).toBeInstanceOf(Function);
  });

  it('should add append method to the decorated class', () => {
    expect(dummy.append).toBeInstanceOf(Function);
  });

  it('should add insertAfter method to the decorated class', () => {
    expect(dummy.insertAfter).toBeInstanceOf(Function);
  });

  describe('constructor', () => {
    it('should append passed children to new parent', () => {
      const childMock = createChildMock();

      dummy = new Dummy({
        children: [ childMock ],
      });

      expect(childMock.appendTo).toBeCalledWith(dummy);
    });
  });

  describe('.children', () => {
    it('should return empty array by default', () => {
      expect(dummy.children).toEqual([]);
    });

    it('should return children passed via constructor', () => {
      const childMock = createChildMock();

      dummy = new Dummy({
        children: [ childMock ],
      });

      expect(dummy.children).toEqual([ childMock ]);
    });
  });

  // append() keeps children ordered; re-appending moves a child to the end.
  describe('.append()', () => {
    it('should add child to the children array', () => {
      const childMock = createChildMock();

      dummy.append(childMock);

      expect(dummy.children).toContain(childMock);
    });

    it('should add several children to the children array', () => {
      const childMock = createChildMock();
      const anotherChildMock = createChildMock();

      dummy.append(childMock, anotherChildMock);

      expect(dummy.children).toEqual([childMock, anotherChildMock]);
    });

    it('should move a child to the end of children array if it is already there', () => {
      const childMock = createChildMock();
      const anotherChildMock = createChildMock();
      const oneMoreChildMock = createChildMock();

      dummy = new Dummy({
        children: [childMock, anotherChildMock, oneMoreChildMock],
      });

      dummy.append(anotherChildMock);

      expect(dummy.children).toEqual([childMock, oneMoreChildMock, anotherChildMock]);
    });

    it('should preserve already existing children', () => {
      const childMock = createChildMock();
      const anotherChildMock = createChildMock();
      const oneMoreChildMock = createChildMock();

      dummy = new Dummy({
        children: [childMock, anotherChildMock],
      });

      dummy.append(oneMoreChildMock);

      expect(dummy.children).toEqual([childMock, anotherChildMock, oneMoreChildMock]);
    });
  });

  // insertAfter() places children immediately after a target, relocating
  // any that were already present elsewhere in the array.
  describe('.insertAfter()', () => {
    it('should insert a child after passed target', () => {
      const childMock = createChildMock();
      const anotherChildMock = createChildMock();
      const childMockToInsert = createChildMock();

      dummy = new Dummy({
        children: [childMock, anotherChildMock],
      });

      dummy.insertAfter(childMock, childMockToInsert);

      expect(dummy.children).toEqual([childMock, childMockToInsert, anotherChildMock]);
    });

    it('should insert several children after passed target', () => {
      const childMock = createChildMock();
      const anotherChildMock = createChildMock();
      const childMockToInsert = createChildMock();
      const anotherChildMockToInsert = createChildMock();

      dummy = new Dummy({
        children: [childMock, anotherChildMock],
      });

      dummy.insertAfter(childMock, childMockToInsert, anotherChildMockToInsert);

      expect(dummy.children).toEqual([childMock, childMockToInsert, anotherChildMockToInsert, anotherChildMock]);
    });

    it('should remove existing child and insert it to the new place', () => {
      const childMock = createChildMock();
      const anotherChildMock = createChildMock();
      const oneMoreChildMock = createChildMock();
      const childMockToInsert = createChildMock();

      dummy = new Dummy({
        children: [childMock, anotherChildMock, oneMoreChildMock, childMockToInsert],
      });

      dummy.insertAfter(anotherChildMock, childMockToInsert);

      expect(dummy.children).toEqual([childMock, anotherChildMock, childMockToInsert, oneMoreChildMock]);
    });
  });

  // removeChild() detaches the child and delegates to the child's remove().
  describe('.removeChild()', () => {
    it('should remove child from the children array', () => {
      const childMock = createChildMock();

      dummy = new Dummy({
        children: [ childMock ],
      });

      dummy.removeChild(childMock);

      expect(dummy.children).toHaveLength(0);
    });

    it('should call remove method of child', () => {
      const childMock = createChildMock();

      dummy = new Dummy({
        children: [ childMock ],
      });

      dummy.removeChild(childMock);

      expect(childMock.remove).toBeCalled();
    });
  });
});
| src/entities/interfaces/ParentNode.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/interfaces/ChildNode.spec.ts",
"retrieved_chunk": " expect(dummy.parent).toBeNull();\n });\n });\n describe('.appendTo()', () => {\n beforeEach(() => {\n dummy = new Dummy();\n });\n it('should call parent\\'s append method on appendTo call', () => {\n dummy.appendTo(parentMock);\n expect(parentMock.append).toBeCalledWith(dummy);",
"score": 43.08533643050165
},
{
"filename": "src/entities/interfaces/ChildNode.spec.ts",
"retrieved_chunk": " expect(parentMock.append).toBeCalledWith(dummy);\n });\n it('should add remove method to the decorated class', () => {\n expect(dummy.remove).toBeInstanceOf(Function);\n });\n it('should add appendTo method to the decorated class', () => {\n expect(dummy.appendTo).toBeInstanceOf(Function);\n });\n describe('.parent', () => {\n it('should return null by default', () => {",
"score": 40.970412840245935
},
{
"filename": "src/entities/interfaces/ChildNode.spec.ts",
"retrieved_chunk": " dummy = new Dummy();\n expect(dummy.parent).toBeNull();\n });\n it('should return parent passed via constructor', () => {\n dummy = new Dummy({ parent: parentMock });\n expect(dummy.parent).toEqual(parentMock);\n });\n });\n describe('.remove()', () => {\n beforeEach(() => {",
"score": 40.16738916183634
},
{
"filename": "src/entities/interfaces/ChildNode.spec.ts",
"retrieved_chunk": " dummy = new Dummy({\n parent: parentMock,\n });\n });\n it('should call parent\\'s removeChild method', () => {\n dummy.remove();\n expect(parentMock.removeChild).toBeCalledWith(dummy);\n });\n it('should set node\\'s parent to null', () => {\n dummy.remove();",
"score": 33.57305610745017
},
{
"filename": "src/entities/interfaces/ChildNode.spec.ts",
"retrieved_chunk": " });\n it('should set node\\'s parent on appendTo call', () => {\n dummy.appendTo(parentMock);\n expect(dummy.parent).toBe(parentMock);\n });\n it('should do nothing if parents are the same', () => {\n const dummyWithParent = new Dummy({\n parent: parentMock,\n });\n jest.resetAllMocks();",
"score": 29.182381174099312
}
] | typescript | .appendTo).toBeCalledWith(dummy); |
import { beforeEach, describe, expect, it } from '@jest/globals';
import { ParentNode } from './ParentNode';
import { ChildNode } from './ChildNode';
// Interface declares the ParentNode mixin members; the decorator installs them.
interface DummyParent extends ParentNode {}

/**
 * Minimal parent-side host class for the integration tests below.
 */
@ParentNode
class DummyParent {
  /**
   * @param _options - dummy options, consumed by the decorator's constructor wrapping
   */
  // eslint-disable-next-line @typescript-eslint/no-empty-function,@typescript-eslint/no-unused-vars,no-unused-vars
  constructor(_options?: unknown) {}
}
// Interface declares the ChildNode mixin members; the decorator installs them.
interface DummyChild extends ChildNode {}

/**
 * Minimal child-side host class for the integration tests below.
 */
@ChildNode
class DummyChild {
  /**
   * @param _options - dummy options, consumed by the decorator's constructor wrapping
   */
  // eslint-disable-next-line @typescript-eslint/no-empty-function,@typescript-eslint/no-unused-vars,no-unused-vars
  constructor(_options?: unknown) {}
}
// Integration tests using real (undecorated-mock-free) parent and child
// instances: both sides of the relationship must stay consistent whichever
// side initiates the change.
describe('ParentNode and ChildNode integration', () => {
  describe('child removal', () => {
    let parent: DummyParent;
    let child: DummyChild;

    beforeEach(() => {
      parent = new DummyParent();
      child = new DummyChild({ parent });
    });

    it('should remove child from parent on child.remove() call', () => {
      child.remove();

      expect(parent.children).not.toContain(child);
    });

    it('should set child\'s parent to null on parent.removeChild() call', () => {
      parent.removeChild(child);

      expect(child.parent).toBeNull();
    });
  });

  describe('child addition', () => {
    let parent: DummyParent;
    let child: DummyChild;

    beforeEach(() => {
      parent = new DummyParent();
      child = new DummyChild();
    });

    it('should add child to parent on child.appendTo call', () => {
      child.appendTo(parent);

      expect(parent.children).toContain(child);
    });

    it('should set child\'s parent on parent.append() call', () => {
      parent.append(child);

      expect(child.parent).toEqual(parent);
    });

    it('should set child\'s parent on parent.insertAfter() call', () => {
      const anotherChild = new DummyChild();

      parent.append(child);
      parent.insertAfter(child, anotherChild);

      expect(anotherChild.parent).toEqual(parent);
    });
  });

  // Re-parenting: attaching a child to a new parent must detach it from
  // the old one automatically.
  describe('child transfer from parent to parent', () => {
    let parent: DummyParent;
    let anotherParent: DummyParent;
    let child: DummyChild;

    beforeEach(() => {
      parent = new DummyParent();
      child = new DummyChild({ parent });
      anotherParent = new DummyParent();
    });

    it('should remove child from the old parent on new parent.append() call', () => {
      anotherParent.append(child);

      expect(parent.children).not.toContain(child);
    });

    it('should remove child from the old parent on new parent.insertAfter() call', () => {
      const anotherChild = new DummyChild({ parent: anotherParent });

      anotherParent.insertAfter(anotherChild, child);

      expect(parent.children).not.toContain(child);
    });
  });
});
| src/entities/interfaces/integration.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/interfaces/ChildNode.spec.ts",
"retrieved_chunk": " dummy = new Dummy({\n parent: parentMock,\n });\n });\n it('should call parent\\'s removeChild method', () => {\n dummy.remove();\n expect(parentMock.removeChild).toBeCalledWith(dummy);\n });\n it('should set node\\'s parent to null', () => {\n dummy.remove();",
"score": 59.06526091223734
},
{
"filename": "src/entities/interfaces/ParentNode.ts",
"retrieved_chunk": " */\n append(...children: ChildNode[]): void;\n /**\n * Removes a child from the parent\n *\n * @param child - child to remove\n */\n removeChild(child: ChildNode): void;\n /**\n * Inserts new children after specified target",
"score": 51.68483317489553
},
{
"filename": "src/entities/interfaces/ChildNode.spec.ts",
"retrieved_chunk": " expect(dummy.parent).toBeNull();\n });\n });\n describe('.appendTo()', () => {\n beforeEach(() => {\n dummy = new Dummy();\n });\n it('should call parent\\'s append method on appendTo call', () => {\n dummy.appendTo(parentMock);\n expect(parentMock.append).toBeCalledWith(dummy);",
"score": 42.993796402584785
},
{
"filename": "src/entities/interfaces/ChildNode.spec.ts",
"retrieved_chunk": " });\n it('should set node\\'s parent on appendTo call', () => {\n dummy.appendTo(parentMock);\n expect(dummy.parent).toBe(parentMock);\n });\n it('should do nothing if parents are the same', () => {\n const dummyWithParent = new Dummy({\n parent: parentMock,\n });\n jest.resetAllMocks();",
"score": 41.89963864557263
},
{
"filename": "src/entities/interfaces/ParentNode.ts",
"retrieved_chunk": " *\n * @param child - child to remove\n */\n public removeChild(child: ChildNode): void {\n const index = this.children.indexOf(child);\n this.children.splice(index, 1);\n child.remove();\n }\n /**\n * Inserts new children after specified target",
"score": 41.813531312220285
}
] | typescript | parent.removeChild(child); |
import { App, Notice, PluginSettingTab, Setting, debounce } from "obsidian";
import FinDocPlugin from "main";
import { idToText } from "utils";
import loadIcons from "loadIcons";
import { types } from "./constants";
export default class SettingsTab extends PluginSettingTab {
plugin: FinDocPlugin;
constructor(app: App, plugin: FinDocPlugin) {
super(app, plugin);
this.plugin = plugin;
loadIcons();
}
createNewColorBtn(): HTMLElement {
const btn = this.containerEl.createEl("button");
btn.classList.add("findoc-btn-margin-bottom");
btn.id = "newColor";
btn.innerText = "Add New Color";
btn.onClickEvent(() => {
this.plugin.settings.colors.unshift("#ffffff");
console.debug(this.plugin.settings.colors);
this.display();
});
return btn;
}
display(): void {
const { containerEl } = this;
containerEl.empty();
containerEl.createEl("h2", { text: "Settings" });
new Setting(containerEl).setName("Support").addButton((button) => {
button.buttonEl.innerHTML =
"<a style='margin: 0 auto;' href='https://www.buymeacoffee.com/studiowebux'><img width='109px' alt='Buy me a Coffee' src='https://cdn.buymeacoffee.com/buttons/v2/default-yellow.png'/></a>";
button.buttonEl.classList.add("findoc-support-btn");
});
new Setting(containerEl)
.setName("CSV Save debounce")
.setDesc(
"Timeout to trigger the CSV saving process (Value must be greater than 500 and less than 5000)"
)
.addText((text) => {
text.setValue(this.plugin.settings.debounce.toString());
text.onChange(
debounce(async (value: string) => {
if (
isNaN(parseInt(value)) ||
parseInt(value) < 500 ||
parseInt(value) > 5000
) {
new Notice("Invalid debounce value !");
return;
}
this.plugin.settings.debounce = value;
await this.plugin.saveSettings();
new Notice("Debounce Updated !");
}, 500)
);
});
new Setting(containerEl).setName("CSV Separator").addText((text) => {
text.setValue(this.plugin.settings.csvSeparator.toString());
text.onChange(
debounce(async (value: string) => {
this.plugin.settings.csvSeparator = value;
await this.plugin.saveSettings();
new Notice("CSV Separator Updated !");
}, 500)
);
});
new Setting(containerEl)
.setName("Models")
.setDesc("Models available (It must be a JSON.stringify version)");
const div = containerEl.createDiv();
div.classList.add("findoc-models-container");
Object.entries(this.plugin.settings.models).forEach(([key, model]) => {
const name = idToText(key);
const modelSection = div.createDiv();
const el = modelSection.createEl("h2");
el.innerText = name;
modelSection.classList.add("findoc-model-section");
new Setting(modelSection)
.setName(`Data Source for ${name}`)
.addDropdown((dropdown) => {
dropdown.addOption(
"splitDailyDates",
"Split By Daily Dates"
);
dropdown.addOption(
"splitByYearMonth",
"Split By Year & Month"
);
dropdown.addOption("splitByYear", "Split By Year");
dropdown.setValue(
this.plugin.settings.models[key].dataSource
);
dropdown.onChange(async (value) => {
this.plugin.settings.models[key].dataSource = value;
await this.plugin.saveSettings();
new Notice("Data Source Updated !");
});
});
new Setting(modelSection)
.setName(`Output Function for ${name}`)
.addDropdown((dropdown) => {
dropdown.addOption(
"generateSumDataSet",
"Generate Sum Dataset"
);
dropdown.addOption(
"generateDailyDataSet",
"Generate Daily Dataset"
);
dropdown.addOption(
"generateSumDataSetPerTypes",
"Generate Sum Dataset Per Types"
);
dropdown.setValue(this.plugin.settings.models[key].output);
dropdown.onChange(async (value) => {
this.plugin.settings.models[key].output = value;
await this.plugin.saveSettings();
new Notice("Output Updated !");
});
});
new Setting(modelSection)
.setName(`Begin at Zero for ${name}`)
.addToggle((toggle) => {
toggle.setValue(
this.plugin.settings.models[key].beginAtZero
);
toggle.onChange(async (value) => {
this.plugin.settings.models[key].beginAtZero = value;
await this.plugin.saveSettings();
new Notice("Begin at Zero Updated !");
});
});
const h2 = modelSection.createEl("h2");
h2.innerText = `Types for ${name}`;
const wrapper = modelSection.createDiv();
wrapper.classList.add("findoc-model-section-wrapper");
const select = wrapper.createEl("select");
select.id = key;
select.multiple = true;
select.classList.add("findoc-select");
select.setAttribute("value", model.types.join(","));
select.onchange = async () => {
const selected = [];
// @ts-ignore
for (const option of document.getElementById(key).options) {
if (option.selected) {
selected.push(option.value);
}
}
// select.value = selected.join(",");
model.types = selected;
await this.plugin.saveSettings();
new Notice("Types Updated !");
};
| types.forEach((type: string) => { |
const opt = select.createEl("option");
opt.id = type;
opt.value = type;
opt.innerText = type;
opt.selected = model.types.includes(type);
});
modelSection.createEl("hr");
});
new Setting(containerEl).setName("Colors");
const colorSection = containerEl.createDiv();
colorSection.appendChild(this.createNewColorBtn());
colorSection.classList.add("findoc-color-section")
this.plugin.settings.colors.forEach((color, key) => {
new Setting(colorSection)
.setName(`Color #${key}`)
.addColorPicker(async (colorPicker) => {
colorPicker.setValue(color);
colorPicker.onChange(
debounce(async (value: string) => {
this.plugin.settings.colors[key] = value;
await this.plugin.saveSettings();
new Notice("Color Updated !");
}, 500)
);
})
.addExtraButton((btn) => {
btn.setTooltip("Delete Color");
btn.setIcon("trash");
btn.onClick(async () => {
this.plugin.settings.colors.splice(key, 1);
await this.plugin.saveSettings();
new Notice("Color Deleted !");
this.display();
});
});
});
}
}
| src/SettingsTab.ts | yet-another-tool-obsidian-findoc-6c84413 | [
{
"filename": "src/view.ts",
"retrieved_chunk": "\t\tconst dropdown = this.contentEl.createEl(\"select\");\n\t\tdropdown.id = id;\n\t\tdropdown.setAttribute(\"value\", selected);\n\t\tdropdown.onchange = () => {\n\t\t\tdropdown.setAttribute(\"value\", dropdown.value);\n\t\t};\n\t\ttypes.forEach((option: string) => {\n\t\t\tconst opt = this.contentEl.createEl(\"option\");\n\t\t\topt.value = option;\n\t\t\topt.id = id + option.replace(\" \", \"_\");",
"score": 38.67221445639553
},
{
"filename": "src/view.ts",
"retrieved_chunk": "\t\t\topt.innerText = option;\n\t\t\tif (option === selected) opt.selected = true;\n\t\t\tdropdown.appendChild(opt);\n\t\t});\n\t\treturn dropdown;\n\t}\n\tcreateTable(data: string[]) {\n\t\tthis.div = this.contentEl.createDiv();\n\t\tconst table = this.contentEl.createEl(\"table\");\n\t\t//",
"score": 31.126816965199417
},
{
"filename": "src/view.ts",
"retrieved_chunk": "\tparent: HTMLElement;\n\tconstructor(leaf: WorkspaceLeaf, plugin: FinDocPlugin) {\n\t\tsuper(leaf);\n\t\tthis.plugin = plugin;\n\t}\n\tgetViewData() {\n\t\treturn this.tableData.join(\"\\n\");\n\t}\n\tdropdown(selected = \"\"): HTMLElement {\n\t\tconst id = new Date().getTime().toString();",
"score": 23.171037080858316
},
{
"filename": "src/methods.ts",
"retrieved_chunk": "\t\t\t\t\t\t\t\t\ttypes[current.id] += current.value;\n\t\t\t\t\t\t\t\t\treturn types;\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{}\n\t\t\t\t\t\t\t);\n\t\t\t\t\t})\n\t\t\t\t\t.reduce((typeSum, current) => {\n\t\t\t\t\t\tif (current[type]) typeSum.push(current[type]);\n\t\t\t\t\t\telse typeSum.push(0);\n\t\t\t\t\t\treturn typeSum;",
"score": 13.936900856055853
},
{
"filename": "src/main.ts",
"retrieved_chunk": "\t\t);\n\t}\n\tasync saveSettings() {\n\t\tawait this.saveData(this.settings);\n\t}\n\tasync onload() {\n\t\ttry {\n\t\t\tawait this.loadSettings();\n\t\t\tthis.addSettingTab(new SettingsTab(this.app, this));\n\t\t\tconst { vault } = this.app;",
"score": 12.644667716633256
}
] | typescript | types.forEach((type: string) => { |
import glob from 'fast-glob';
import { CliUtilityService, Command, CommandRunner, Help, Option } from 'nest-commander';
import pQueue from 'p-queue';
import path from 'path';
import { setTimeout as delay } from 'timers/promises';
import { Logger } from '@nestjs/common';
import { AccountsImportService } from '../../modules/accounts-import/accounts-import.service';
import { ExportSessionsService } from '../../modules/export-sessions/export-sessions.service';
import { ProxiesImportService } from '../../modules/proxies-import/proxies-import.service';
import { ProxiesService } from '../../modules/proxies/proxies.service';
import { SecretsImportService } from '../../modules/secrets-import/secrets-import.service';
import { CreateSessionsService } from './create-sessions.service';
interface CreateCommandOptions {
accounts: string | string[];
secrets: string | string[];
proxies: string | string[];
concurrency: number;
output: string;
overwrite: boolean;
}
@Command({
name: 'create',
description: 'Creates new sessions',
})
export class CreateSessionsCommand extends CommandRunner {
private readonly logger = new Logger(CreateSessionsCommand.name);
constructor(
private readonly createSessionsService: CreateSessionsService,
private readonly exportSessionsService: ExportSessionsService,
private readonly accountsImportService: AccountsImportService,
private readonly secretsImportService: SecretsImportService,
private readonly proxiesImportService: ProxiesImportService,
private readonly proxiesService: ProxiesService,
) {
super();
}
public async run(args: string[], options: CreateCommandOptions) {
try {
const accountsOptionInput = await this.normalizeInput(options.accounts);
let accounts = await this.accountsImportService.loadAccounts(accountsOptionInput);
if (accounts.length === 0) throw new Error('No accounts found');
this.logger.log(`Accounts: ${accounts.length}`);
const secretsOptionInput = await this.normalizeInput(options.secrets);
const secrets = await this.secretsImportService.loadSecrets(secretsOptionInput);
this.logger.log(`Secrets: ${secrets.length}`);
const outputOptionInput = options.output;
if (!outputOptionInput) throw new Error('Output path is required');
const output = path.resolve(outputOptionInput);
| await this.exportSessionsService.setOutputPath(output); |
this.logger.log(`Output: ${output}`);
const overwriteExistingSessions = options.overwrite;
if (!overwriteExistingSessions) {
const sessionsPaths = await this.normalizeInput(`${output}/*`);
const existingSessions = await this.accountsImportService.loadAccounts(sessionsPaths);
this.logger.log(`Ignoring existing sessions: ${existingSessions.length}`);
accounts = accounts.filter((account) => !existingSessions.some((a) => a.username === account.username));
}
if (accounts.length === 0) {
this.logger.log('No accounts to create');
return;
}
const proxiesOptionInput = await this.normalizeInput(options.proxies);
const proxies = await this.proxiesImportService.loadProxies(proxiesOptionInput);
this.proxiesService.setProxies(proxies);
this.logger.log(`Proxies: ${proxies.length}`);
const concurrencyOptionInput = options.concurrency;
const concurrency = proxies.length > 0 ? concurrencyOptionInput || Math.min(proxies.length * 3, 100) : 1;
this.logger.log(`Concurrency: ${concurrency}`);
this.logger.log(`Starting to create sessions for ${accounts.length} accounts`);
this.accountsImportService.assignSecretsToAccounts(accounts, secrets);
let success = 0;
let fails = 0;
let left = accounts.length;
const queue = new pQueue({ concurrency, interval: 10, intervalCap: 1 });
for (const account of accounts) {
queue.add(async () => {
try {
const session = await this.createSessionsService.createSession(account);
await this.exportSessionsService.exportSession(session);
success++;
this.logger.log(`Success: ${account.username}, left: ${--left}`);
} catch (error) {
fails++;
this.logger.warn(`Fail: ${account.username}, left: ${--left}`);
}
});
}
await queue.onIdle();
this.logger.log(`Session creation complete`);
this.logger.log(`Success: ${success}`);
this.logger.log(`Fails: ${fails}`);
await delay(1000);
} catch (error) {
this.logger.error(error.message);
}
}
private async normalizeInput(input: string | string[]) {
if (!input) return [];
if (!Array.isArray(input)) input = [input];
const filteredInput = input.filter((el) => typeof el === 'string' && el.trim() !== '');
if (filteredInput.length === 0) return [];
const nestedData = await Promise.all(
filteredInput.map(async (el) => {
el = el.trim();
// Possible glob pattern
const files = await glob(el);
if (files.length > 0) return files;
// Possible string
return el.split(/\s+|\r?\n/).map((line) => line.trim());
}),
);
return nestedData.flat();
}
@Option({
required: true,
flags: '-a, --accounts <accounts...>',
description: `Specify one or more accounts.
Account can be specified as:
- A simple string.
- A file path to load accounts from (one account per line).
- A glob pattern to load accounts from multiple files.
Supported formats:
- username:password
- username:password:sharedSecret
- username:password:sharedSecret:identitySecret
- ASF json`,
})
private parseAccountsOption(val: string, accumulator: string[] = []) {
accumulator.push(val);
return accumulator;
}
@Option({
flags: '-s, --secrets <secrets...>',
description: `Specify one or more secrets.
Secret can be specified as:
- A file path to load secrets from file.
- A glob pattern to load secrets from multiple files.
Supported formats:
- maFile
- ASF db`,
})
private parseSecretsOption(val: string, accumulator: string[] = []) {
accumulator.push(val);
return accumulator;
}
@Option({
flags: '-p, --proxies <proxies...>',
description: `Specify one or more proxies.
Proxy can be specified as:
- A string in the format <protocol>://<username>:<password>@<host>:<port>.
- A file path to load proxies from a text file.
Supported protocols:
- http
- https`,
})
private parseProxiesOption(val: string, accumulator: string[] = []) {
accumulator.push(val);
return accumulator;
}
@Option({
flags: '-c, --concurrency <concurrency>',
description: `Specify the number of concurrent runs.
Default: 1, or the number of proxies.`,
})
private parseConcurrencyOption(val: string) {
const parsed = parseInt(val, 10);
if (Number.isNaN(parsed)) throw new Error('Concurrency must be a number');
if (parsed < 1) throw new Error('Concurrency must be greater than 0');
return parsed;
}
@Option({
flags: '-o, --output <output>',
description: 'Specify the output directory.',
defaultValue: './sessions',
})
private parseOutputOption(val: string) {
return val;
}
@Option({
flags: '--overwrite (-w)',
description: 'Overwrite existing sessions.',
defaultValue: false,
})
private parseOverwriteOption(val: string) {
return new CliUtilityService().parseBoolean(val);
}
@Help('afterAll')
private displayExamples() {
return `
Examples:
create -a accounts.txt -s ./secrets -p proxies.txt
create -a username:password -p proxies.txt`;
}
}
| src/commands/create/create-sessions.command.ts | Sadzurami-steam-sessions-creator-97b7294 | [
{
"filename": "src/commands/validate/validate-sessions.command.ts",
"retrieved_chunk": " public async run(args: string[], options: ValidateCommandOptions) {\n try {\n const sessionsOptionInput = await this.normalizeInput(options.sessions);\n const sessions = await this.sessionsImportService.loadSessions(sessionsOptionInput);\n if (sessions.length === 0) throw new Error('No sessions found');\n this.logger.log(`Sessions: ${sessions.length}`);\n await this.validateSessionsService.validateSessions(sessions);\n } catch (error) {\n this.logger.error(error.message);\n }",
"score": 56.32735740494052
},
{
"filename": "src/modules/export-sessions/export-sessions.service.ts",
"retrieved_chunk": " if (!directory || typeof directory !== 'string') throw new Error('Invalid output path');\n if (!path.isAbsolute(directory)) throw new Error('Output path must be absolute');\n try {\n await fs.mkdir(directory, { recursive: true });\n } catch (error) {\n throw new Error('Failed to create output directory', { cause: error });\n }\n this.outputPath = directory;\n }\n public async exportSession(session: Session) {",
"score": 39.19724534268681
},
{
"filename": "src/modules/accounts-import/accounts-import.service.ts",
"retrieved_chunk": " if (errors.length > 0 && accounts.length > 0) {\n this.logger.warn(`The following account sources are invalid:\\n${errors.join('\\n')}`);\n await delay(1000);\n const { confirm } = await inquirer.prompt({\n type: 'confirm',\n name: 'confirm',\n message: 'Continue with the valid accounts?',\n default: false,\n });\n if (!confirm) throw new Error('Aborted by user');",
"score": 39.18292431304699
},
{
"filename": "src/modules/accounts-import/accounts-import.service.ts",
"retrieved_chunk": " if (!Array.isArray(input)) input = [input];\n if (input.length === 0) return [];\n let accounts: Account[] = [];\n const errors: string[] = [];\n const readResults = await Promise.all(input.map((input) => this.readAccountsFromInput(input)));\n for (const result of readResults) {\n accounts.push(...result.values);\n errors.push(...result.errors);\n }\n accounts = this.removeDuplicates(accounts);",
"score": 38.06474716521108
},
{
"filename": "src/modules/secrets-import/secrets-import.service.ts",
"retrieved_chunk": " }\n secrets = this.removeDuplicates(secrets);\n if (errors.length > 0) {\n this.logger.warn(`The following secret sources are invalid:\\n${errors.join('\\n')}`);\n await delay(1000);\n const { confirm } = await inquirer.prompt({\n type: 'confirm',\n name: 'confirm',\n message:\n secrets.length > 0 ? `Continue with ${secrets.length} valid secrets?` : 'Continue without any secrets?',",
"score": 35.36993557422582
}
] | typescript | await this.exportSessionsService.setOutputPath(output); |
import { beforeEach, describe } from '@jest/globals';
import { ParentNode } from './ParentNode';
import { ChildNode } from './ChildNode';
interface DummyParent extends ParentNode {}
/**
*
*/
@ParentNode
class DummyParent {
/**
*
* @param _options - dummy options
*/
// eslint-disable-next-line @typescript-eslint/no-empty-function,@typescript-eslint/no-unused-vars,no-unused-vars
constructor(_options?: unknown) {}
}
interface DummyChild extends ChildNode {}
/**
*
*/
@ChildNode
class DummyChild {
/**
*
* @param _options - dummy options
*/
// eslint-disable-next-line @typescript-eslint/no-empty-function,@typescript-eslint/no-unused-vars,no-unused-vars
constructor(_options?: unknown) {}
}
describe('ParentNode and ChildNode integration', () => {
describe('child removal', () => {
let parent: DummyParent;
let child: DummyChild;
beforeEach(() => {
parent = new DummyParent();
child = new DummyChild({ parent });
});
it('should remove child from parent on child.remove() call', () => {
child.remove();
expect(parent.children).not.toContain(child);
});
it('should set child\'s parent to null on parent.removeChild() call', () => {
parent.removeChild(child);
expect(child.parent).toBeNull();
});
});
describe('child addition', () => {
let parent: DummyParent;
let child: DummyChild;
beforeEach(() => {
parent = new DummyParent();
child = new DummyChild();
});
it('should add child to parent on child.appendTo call', () => {
| child.appendTo(parent); |
expect(parent.children).toContain(child);
});
it('should set child\'s parent on parent.append() call', () => {
parent.append(child);
expect(child.parent).toEqual(parent);
});
it('should set child\'s parent on parent.insertAfter() call', () => {
const anotherChild = new DummyChild();
parent.append(child);
parent.insertAfter(child, anotherChild);
expect(anotherChild.parent).toEqual(parent);
});
});
describe('child transfer from parent to parent', () => {
let parent: DummyParent;
let anotherParent: DummyParent;
let child: DummyChild;
beforeEach(() => {
parent = new DummyParent();
child = new DummyChild({ parent });
anotherParent = new DummyParent();
});
it('should remove child from the old parent on new parent.append() call', () => {
anotherParent.append(child);
expect(parent.children).not.toContain(child);
});
it('should remove child from the old parent on new parent.insertAfter() call', () => {
const anotherChild = new DummyChild({ parent: anotherParent });
anotherParent.insertAfter(anotherChild, child);
expect(parent.children).not.toContain(child);
});
});
});
| src/entities/interfaces/integration.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/interfaces/ChildNode.spec.ts",
"retrieved_chunk": " expect(dummy.parent).toBeNull();\n });\n });\n describe('.appendTo()', () => {\n beforeEach(() => {\n dummy = new Dummy();\n });\n it('should call parent\\'s append method on appendTo call', () => {\n dummy.appendTo(parentMock);\n expect(parentMock.append).toBeCalledWith(dummy);",
"score": 35.01167387605475
},
{
"filename": "src/entities/interfaces/ParentNode.ts",
"retrieved_chunk": " if (index === -1) {\n return;\n }\n this.children.splice(index, 1);\n });\n this.children.push(...children);\n children.forEach(child => child.appendTo(this));\n }\n /**\n * Removes a child from the parent",
"score": 28.748188147452886
},
{
"filename": "src/entities/interfaces/ChildNode.spec.ts",
"retrieved_chunk": " });\n it('should set node\\'s parent on appendTo call', () => {\n dummy.appendTo(parentMock);\n expect(dummy.parent).toBe(parentMock);\n });\n it('should do nothing if parents are the same', () => {\n const dummyWithParent = new Dummy({\n parent: parentMock,\n });\n jest.resetAllMocks();",
"score": 27.693796793153403
},
{
"filename": "src/entities/interfaces/ParentNode.ts",
"retrieved_chunk": " */\n append(...children: ChildNode[]): void;\n /**\n * Removes a child from the parent\n *\n * @param child - child to remove\n */\n removeChild(child: ChildNode): void;\n /**\n * Inserts new children after specified target",
"score": 27.286947164930197
},
{
"filename": "src/entities/FormattingNode/index.ts",
"retrieved_chunk": " * @param index - char index\n * @private\n */\n #findChildByIndex(index: number): [child: InlineNode & ChildNode | null, offset: number] {\n let totalLength = 0;\n for (const child of this.children) {\n if (index <= child.length + totalLength) {\n return [child, totalLength];\n }\n totalLength += child.length;",
"score": 23.019292992768506
}
] | typescript | child.appendTo(parent); |
import { beforeEach, describe } from '@jest/globals';
import { ParentNode } from './ParentNode';
import { ChildNode } from './ChildNode';
interface DummyParent extends ParentNode {}
/**
*
*/
@ParentNode
class DummyParent {
/**
*
* @param _options - dummy options
*/
// eslint-disable-next-line @typescript-eslint/no-empty-function,@typescript-eslint/no-unused-vars,no-unused-vars
constructor(_options?: unknown) {}
}
interface DummyChild extends ChildNode {}
/**
*
*/
@ChildNode
class DummyChild {
/**
*
* @param _options - dummy options
*/
// eslint-disable-next-line @typescript-eslint/no-empty-function,@typescript-eslint/no-unused-vars,no-unused-vars
constructor(_options?: unknown) {}
}
describe('ParentNode and ChildNode integration', () => {
describe('child removal', () => {
let parent: DummyParent;
let child: DummyChild;
beforeEach(() => {
parent = new DummyParent();
child = new DummyChild({ parent });
});
it('should remove child from parent on child.remove() call', () => {
child.remove();
| expect(parent.children).not.toContain(child); |
});
it('should set child\'s parent to null on parent.removeChild() call', () => {
parent.removeChild(child);
expect(child.parent).toBeNull();
});
});
describe('child addition', () => {
let parent: DummyParent;
let child: DummyChild;
beforeEach(() => {
parent = new DummyParent();
child = new DummyChild();
});
it('should add child to parent on child.appendTo call', () => {
child.appendTo(parent);
expect(parent.children).toContain(child);
});
it('should set child\'s parent on parent.append() call', () => {
parent.append(child);
expect(child.parent).toEqual(parent);
});
it('should set child\'s parent on parent.insertAfter() call', () => {
const anotherChild = new DummyChild();
parent.append(child);
parent.insertAfter(child, anotherChild);
expect(anotherChild.parent).toEqual(parent);
});
});
describe('child transfer from parent to parent', () => {
let parent: DummyParent;
let anotherParent: DummyParent;
let child: DummyChild;
beforeEach(() => {
parent = new DummyParent();
child = new DummyChild({ parent });
anotherParent = new DummyParent();
});
it('should remove child from the old parent on new parent.append() call', () => {
anotherParent.append(child);
expect(parent.children).not.toContain(child);
});
it('should remove child from the old parent on new parent.insertAfter() call', () => {
const anotherChild = new DummyChild({ parent: anotherParent });
anotherParent.insertAfter(anotherChild, child);
expect(parent.children).not.toContain(child);
});
});
});
| src/entities/interfaces/integration.spec.ts | editor-js-document-model-4cb9623 | [
{
"filename": "src/entities/interfaces/ParentNode.ts",
"retrieved_chunk": " */\n append(...children: ChildNode[]): void;\n /**\n * Removes a child from the parent\n *\n * @param child - child to remove\n */\n removeChild(child: ChildNode): void;\n /**\n * Inserts new children after specified target",
"score": 41.013366759505224
},
{
"filename": "src/entities/interfaces/ParentNode.ts",
"retrieved_chunk": " *\n * @param child - child to remove\n */\n public removeChild(child: ChildNode): void {\n const index = this.children.indexOf(child);\n this.children.splice(index, 1);\n child.remove();\n }\n /**\n * Inserts new children after specified target",
"score": 34.77224065073404
},
{
"filename": "src/entities/interfaces/ChildNode.spec.ts",
"retrieved_chunk": " dummy = new Dummy();\n expect(dummy.parent).toBeNull();\n });\n it('should return parent passed via constructor', () => {\n dummy = new Dummy({ parent: parentMock });\n expect(dummy.parent).toEqual(parentMock);\n });\n });\n describe('.remove()', () => {\n beforeEach(() => {",
"score": 31.090341902382214
},
{
"filename": "src/entities/interfaces/ParentNode.ts",
"retrieved_chunk": " if (index === -1) {\n return;\n }\n this.children.splice(index, 1);\n });\n this.children.push(...children);\n children.forEach(child => child.appendTo(this));\n }\n /**\n * Removes a child from the parent",
"score": 30.671110409478104
},
{
"filename": "src/entities/interfaces/ChildNode.spec.ts",
"retrieved_chunk": " dummy = new Dummy({\n parent: parentMock,\n });\n });\n it('should call parent\\'s removeChild method', () => {\n dummy.remove();\n expect(parentMock.removeChild).toBeCalledWith(dummy);\n });\n it('should set node\\'s parent to null', () => {\n dummy.remove();",
"score": 29.36407587893509
}
] | typescript | expect(parent.children).not.toContain(child); |
import glob from 'fast-glob';
import { CliUtilityService, Command, CommandRunner, Help, Option } from 'nest-commander';
import pQueue from 'p-queue';
import path from 'path';
import { setTimeout as delay } from 'timers/promises';
import { Logger } from '@nestjs/common';
import { AccountsImportService } from '../../modules/accounts-import/accounts-import.service';
import { ExportSessionsService } from '../../modules/export-sessions/export-sessions.service';
import { ProxiesImportService } from '../../modules/proxies-import/proxies-import.service';
import { ProxiesService } from '../../modules/proxies/proxies.service';
import { SecretsImportService } from '../../modules/secrets-import/secrets-import.service';
import { CreateSessionsService } from './create-sessions.service';
interface CreateCommandOptions {
accounts: string | string[];
secrets: string | string[];
proxies: string | string[];
concurrency: number;
output: string;
overwrite: boolean;
}
@Command({
name: 'create',
description: 'Creates new sessions',
})
export class CreateSessionsCommand extends CommandRunner {
private readonly logger = new Logger(CreateSessionsCommand.name);
constructor(
private readonly createSessionsService: CreateSessionsService,
private readonly exportSessionsService: ExportSessionsService,
private readonly accountsImportService: AccountsImportService,
private readonly secretsImportService: SecretsImportService,
private readonly proxiesImportService: ProxiesImportService,
private readonly proxiesService: ProxiesService,
) {
super();
}
public async run(args: string[], options: CreateCommandOptions) {
try {
const accountsOptionInput = await this.normalizeInput(options.accounts);
let accounts = await this.accountsImportService.loadAccounts(accountsOptionInput);
if (accounts.length === 0) throw new Error('No accounts found');
this.logger.log(`Accounts: ${accounts.length}`);
const secretsOptionInput = await this.normalizeInput(options.secrets);
const secrets = await this.secretsImportService.loadSecrets(secretsOptionInput);
this.logger.log(`Secrets: ${secrets.length}`);
const outputOptionInput = options.output;
if (!outputOptionInput) throw new Error('Output path is required');
const output = path.resolve(outputOptionInput);
await this.exportSessionsService.setOutputPath(output);
this.logger.log(`Output: ${output}`);
const overwriteExistingSessions = options.overwrite;
if (!overwriteExistingSessions) {
const sessionsPaths = await this.normalizeInput(`${output}/*`);
const existingSessions = await this.accountsImportService.loadAccounts(sessionsPaths);
this.logger.log(`Ignoring existing sessions: ${existingSessions.length}`);
accounts = accounts.filter((account) => !existingSessions.some((a) => a.username === account.username));
}
if (accounts.length === 0) {
this.logger.log('No accounts to create');
return;
}
const proxiesOptionInput = await this.normalizeInput(options.proxies);
const proxies = await this.proxiesImportService.loadProxies(proxiesOptionInput);
| this.proxiesService.setProxies(proxies); |
this.logger.log(`Proxies: ${proxies.length}`);
const concurrencyOptionInput = options.concurrency;
const concurrency = proxies.length > 0 ? concurrencyOptionInput || Math.min(proxies.length * 3, 100) : 1;
this.logger.log(`Concurrency: ${concurrency}`);
this.logger.log(`Starting to create sessions for ${accounts.length} accounts`);
this.accountsImportService.assignSecretsToAccounts(accounts, secrets);
let success = 0;
let fails = 0;
let left = accounts.length;
const queue = new pQueue({ concurrency, interval: 10, intervalCap: 1 });
for (const account of accounts) {
queue.add(async () => {
try {
const session = await this.createSessionsService.createSession(account);
await this.exportSessionsService.exportSession(session);
success++;
this.logger.log(`Success: ${account.username}, left: ${--left}`);
} catch (error) {
fails++;
this.logger.warn(`Fail: ${account.username}, left: ${--left}`);
}
});
}
await queue.onIdle();
this.logger.log(`Session creation complete`);
this.logger.log(`Success: ${success}`);
this.logger.log(`Fails: ${fails}`);
await delay(1000);
} catch (error) {
this.logger.error(error.message);
}
}
private async normalizeInput(input: string | string[]) {
if (!input) return [];
if (!Array.isArray(input)) input = [input];
const filteredInput = input.filter((el) => typeof el === 'string' && el.trim() !== '');
if (filteredInput.length === 0) return [];
const nestedData = await Promise.all(
filteredInput.map(async (el) => {
el = el.trim();
// Possible glob pattern
const files = await glob(el);
if (files.length > 0) return files;
// Possible string
return el.split(/\s+|\r?\n/).map((line) => line.trim());
}),
);
return nestedData.flat();
}
@Option({
required: true,
flags: '-a, --accounts <accounts...>',
description: `Specify one or more accounts.
Account can be specified as:
- A simple string.
- A file path to load accounts from (one account per line).
- A glob pattern to load accounts from multiple files.
Supported formats:
- username:password
- username:password:sharedSecret
- username:password:sharedSecret:identitySecret
- ASF json`,
})
private parseAccountsOption(val: string, accumulator: string[] = []) {
accumulator.push(val);
return accumulator;
}
@Option({
flags: '-s, --secrets <secrets...>',
description: `Specify one or more secrets.
Secret can be specified as:
- A file path to load secrets from file.
- A glob pattern to load secrets from multiple files.
Supported formats:
- maFile
- ASF db`,
})
private parseSecretsOption(val: string, accumulator: string[] = []) {
accumulator.push(val);
return accumulator;
}
@Option({
flags: '-p, --proxies <proxies...>',
description: `Specify one or more proxies.
Proxy can be specified as:
- A string in the format <protocol>://<username>:<password>@<host>:<port>.
- A file path to load proxies from a text file.
Supported protocols:
- http
- https`,
})
private parseProxiesOption(val: string, accumulator: string[] = []) {
accumulator.push(val);
return accumulator;
}
@Option({
flags: '-c, --concurrency <concurrency>',
description: `Specify the number of concurrent runs.
Default: 1, or the number of proxies.`,
})
private parseConcurrencyOption(val: string) {
const parsed = parseInt(val, 10);
if (Number.isNaN(parsed)) throw new Error('Concurrency must be a number');
if (parsed < 1) throw new Error('Concurrency must be greater than 0');
return parsed;
}
@Option({
flags: '-o, --output <output>',
description: 'Specify the output directory.',
defaultValue: './sessions',
})
private parseOutputOption(val: string) {
return val;
}
@Option({
flags: '--overwrite (-w)',
description: 'Overwrite existing sessions.',
defaultValue: false,
})
private parseOverwriteOption(val: string) {
return new CliUtilityService().parseBoolean(val);
}
@Help('afterAll')
private displayExamples() {
return `
Examples:
create -a accounts.txt -s ./secrets -p proxies.txt
create -a username:password -p proxies.txt`;
}
}
| src/commands/create/create-sessions.command.ts | Sadzurami-steam-sessions-creator-97b7294 | [
{
"filename": "src/commands/validate/validate-sessions.command.ts",
"retrieved_chunk": " public async run(args: string[], options: ValidateCommandOptions) {\n try {\n const sessionsOptionInput = await this.normalizeInput(options.sessions);\n const sessions = await this.sessionsImportService.loadSessions(sessionsOptionInput);\n if (sessions.length === 0) throw new Error('No sessions found');\n this.logger.log(`Sessions: ${sessions.length}`);\n await this.validateSessionsService.validateSessions(sessions);\n } catch (error) {\n this.logger.error(error.message);\n }",
"score": 38.79779180949831
},
{
"filename": "src/modules/accounts-import/accounts-import.service.ts",
"retrieved_chunk": " }\n return accounts;\n }\n public assignSecretsToAccounts(accounts: Account[], secrets: Secrets[]) {\n const secretsMap = new Map<string, Secrets>();\n for (const secret of secrets) {\n secretsMap.set(secret.username, secret);\n // some existing steam-oriented apps are case-insensitive to usernames in secrets\n secretsMap.set(secret.username.toLowerCase(), secret);\n }",
"score": 31.126802727194416
},
{
"filename": "src/modules/accounts-import/accounts-import.service.ts",
"retrieved_chunk": " if (errors.length > 0 && accounts.length > 0) {\n this.logger.warn(`The following account sources are invalid:\\n${errors.join('\\n')}`);\n await delay(1000);\n const { confirm } = await inquirer.prompt({\n type: 'confirm',\n name: 'confirm',\n message: 'Continue with the valid accounts?',\n default: false,\n });\n if (!confirm) throw new Error('Aborted by user');",
"score": 31.019448705096703
},
{
"filename": "src/modules/accounts-import/accounts-import.service.ts",
"retrieved_chunk": " if (!Array.isArray(input)) input = [input];\n if (input.length === 0) return [];\n let accounts: Account[] = [];\n const errors: string[] = [];\n const readResults = await Promise.all(input.map((input) => this.readAccountsFromInput(input)));\n for (const result of readResults) {\n accounts.push(...result.values);\n errors.push(...result.errors);\n }\n accounts = this.removeDuplicates(accounts);",
"score": 29.232497881272693
},
{
"filename": "src/modules/accounts-import/accounts-import.service.ts",
"retrieved_chunk": " for (const account of accounts) {\n let secret = secretsMap.get(account.username);\n if (!secret) secret = secretsMap.get(account.username.toLowerCase());\n if (!secret) continue;\n account.sharedSecret = secret.sharedSecret;\n account.identitySecret = secret.identitySecret;\n }\n }\n private removeDuplicates(accounts: Account[]) {\n const map = new Map<string, Account>();",
"score": 29.163503262562646
}
] | typescript | this.proxiesService.setProxies(proxies); |
import glob from 'fast-glob';
import { CliUtilityService, Command, CommandRunner, Help, Option } from 'nest-commander';
import pQueue from 'p-queue';
import path from 'path';
import { setTimeout as delay } from 'timers/promises';
import { Logger } from '@nestjs/common';
import { AccountsImportService } from '../../modules/accounts-import/accounts-import.service';
import { ExportSessionsService } from '../../modules/export-sessions/export-sessions.service';
import { ProxiesImportService } from '../../modules/proxies-import/proxies-import.service';
import { ProxiesService } from '../../modules/proxies/proxies.service';
import { SecretsImportService } from '../../modules/secrets-import/secrets-import.service';
import { CreateSessionsService } from './create-sessions.service';
interface CreateCommandOptions {
accounts: string | string[];
secrets: string | string[];
proxies: string | string[];
concurrency: number;
output: string;
overwrite: boolean;
}
@Command({
name: 'create',
description: 'Creates new sessions',
})
export class CreateSessionsCommand extends CommandRunner {
private readonly logger = new Logger(CreateSessionsCommand.name);
constructor(
private readonly createSessionsService: CreateSessionsService,
private readonly exportSessionsService: ExportSessionsService,
private readonly accountsImportService: AccountsImportService,
private readonly secretsImportService: SecretsImportService,
private readonly proxiesImportService: ProxiesImportService,
private readonly proxiesService: ProxiesService,
) {
super();
}
public async run(args: string[], options: CreateCommandOptions) {
try {
const accountsOptionInput = await this.normalizeInput(options.accounts);
let accounts = await this.accountsImportService.loadAccounts(accountsOptionInput);
if (accounts.length === 0) throw new Error('No accounts found');
this.logger.log(`Accounts: ${accounts.length}`);
const secretsOptionInput = await this.normalizeInput(options.secrets);
const secrets = await this.secretsImportService.loadSecrets(secretsOptionInput);
this.logger.log(`Secrets: ${secrets.length}`);
const outputOptionInput = options.output;
if (!outputOptionInput) throw new Error('Output path is required');
const output = path.resolve(outputOptionInput);
await this.exportSessionsService.setOutputPath(output);
this.logger.log(`Output: ${output}`);
const overwriteExistingSessions = options.overwrite;
if (!overwriteExistingSessions) {
const sessionsPaths = await this.normalizeInput(`${output}/*`);
const existingSessions = await this.accountsImportService.loadAccounts(sessionsPaths);
this.logger.log(`Ignoring existing sessions: ${existingSessions.length}`);
accounts = accounts.filter((account) => !existingSessions.some((a) => a.username === account.username));
}
if (accounts.length === 0) {
this.logger.log('No accounts to create');
return;
}
const proxiesOptionInput = await this.normalizeInput(options.proxies);
const proxies = await this.proxiesImportService.loadProxies(proxiesOptionInput);
this.proxiesService.setProxies(proxies);
this.logger.log(`Proxies: ${proxies.length}`);
const concurrencyOptionInput = options.concurrency;
const concurrency = proxies.length > 0 ? concurrencyOptionInput || Math.min(proxies.length * 3, 100) : 1;
this.logger.log(`Concurrency: ${concurrency}`);
this.logger.log(`Starting to create sessions for ${accounts.length} accounts`);
| this.accountsImportService.assignSecretsToAccounts(accounts, secrets); |
let success = 0;
let fails = 0;
let left = accounts.length;
const queue = new pQueue({ concurrency, interval: 10, intervalCap: 1 });
for (const account of accounts) {
queue.add(async () => {
try {
const session = await this.createSessionsService.createSession(account);
await this.exportSessionsService.exportSession(session);
success++;
this.logger.log(`Success: ${account.username}, left: ${--left}`);
} catch (error) {
fails++;
this.logger.warn(`Fail: ${account.username}, left: ${--left}`);
}
});
}
await queue.onIdle();
this.logger.log(`Session creation complete`);
this.logger.log(`Success: ${success}`);
this.logger.log(`Fails: ${fails}`);
await delay(1000);
} catch (error) {
this.logger.error(error.message);
}
}
private async normalizeInput(input: string | string[]) {
if (!input) return [];
if (!Array.isArray(input)) input = [input];
const filteredInput = input.filter((el) => typeof el === 'string' && el.trim() !== '');
if (filteredInput.length === 0) return [];
const nestedData = await Promise.all(
filteredInput.map(async (el) => {
el = el.trim();
// Possible glob pattern
const files = await glob(el);
if (files.length > 0) return files;
// Possible string
return el.split(/\s+|\r?\n/).map((line) => line.trim());
}),
);
return nestedData.flat();
}
@Option({
required: true,
flags: '-a, --accounts <accounts...>',
description: `Specify one or more accounts.
Account can be specified as:
- A simple string.
- A file path to load accounts from (one account per line).
- A glob pattern to load accounts from multiple files.
Supported formats:
- username:password
- username:password:sharedSecret
- username:password:sharedSecret:identitySecret
- ASF json`,
})
private parseAccountsOption(val: string, accumulator: string[] = []) {
accumulator.push(val);
return accumulator;
}
@Option({
flags: '-s, --secrets <secrets...>',
description: `Specify one or more secrets.
Secret can be specified as:
- A file path to load secrets from file.
- A glob pattern to load secrets from multiple files.
Supported formats:
- maFile
- ASF db`,
})
private parseSecretsOption(val: string, accumulator: string[] = []) {
accumulator.push(val);
return accumulator;
}
@Option({
flags: '-p, --proxies <proxies...>',
description: `Specify one or more proxies.
Proxy can be specified as:
- A string in the format <protocol>://<username>:<password>@<host>:<port>.
- A file path to load proxies from a text file.
Supported protocols:
- http
- https`,
})
private parseProxiesOption(val: string, accumulator: string[] = []) {
accumulator.push(val);
return accumulator;
}
@Option({
flags: '-c, --concurrency <concurrency>',
description: `Specify the number of concurrent runs.
Default: 1, or the number of proxies.`,
})
private parseConcurrencyOption(val: string) {
const parsed = parseInt(val, 10);
if (Number.isNaN(parsed)) throw new Error('Concurrency must be a number');
if (parsed < 1) throw new Error('Concurrency must be greater than 0');
return parsed;
}
@Option({
flags: '-o, --output <output>',
description: 'Specify the output directory.',
defaultValue: './sessions',
})
private parseOutputOption(val: string) {
return val;
}
@Option({
flags: '--overwrite (-w)',
description: 'Overwrite existing sessions.',
defaultValue: false,
})
private parseOverwriteOption(val: string) {
return new CliUtilityService().parseBoolean(val);
}
@Help('afterAll')
private displayExamples() {
return `
Examples:
create -a accounts.txt -s ./secrets -p proxies.txt
create -a username:password -p proxies.txt`;
}
}
| src/commands/create/create-sessions.command.ts | Sadzurami-steam-sessions-creator-97b7294 | [
{
"filename": "src/commands/validate/validate-sessions.command.ts",
"retrieved_chunk": " public async run(args: string[], options: ValidateCommandOptions) {\n try {\n const sessionsOptionInput = await this.normalizeInput(options.sessions);\n const sessions = await this.sessionsImportService.loadSessions(sessionsOptionInput);\n if (sessions.length === 0) throw new Error('No sessions found');\n this.logger.log(`Sessions: ${sessions.length}`);\n await this.validateSessionsService.validateSessions(sessions);\n } catch (error) {\n this.logger.error(error.message);\n }",
"score": 51.48342883161752
},
{
"filename": "src/modules/proxies-import/proxies-import.service.ts",
"retrieved_chunk": " proxies = this.removeDuplicates(proxies);\n if (errors.length > 0) {\n this.logger.warn(`The following proxy sources are invalid:\\n${errors.join('\\n')}`);\n await delay(1000);\n const { confirm } = await inquirer.prompt({\n type: 'confirm',\n name: 'confirm',\n message: proxies.length > 0 ? `Continue with ${proxies.length} valid proxies ?` : 'Continue without proxies ?',\n default: false,\n });",
"score": 47.452847756756775
},
{
"filename": "src/modules/proxies/proxies.service.ts",
"retrieved_chunk": " public setProxies(proxies: Proxy[]) {\n if (proxies.length === 0) return;\n for (const proxy of proxies) {\n this.proxies.set(proxy.toString(), proxy);\n }\n }\n public async getProxy(): Promise<Proxy | null> {\n if (this.proxies.size === 0) return null;\n const proxy = await this.proxiesUsageQueue.add(() => this.fetchProxy());\n this.throttleProxy(proxy);",
"score": 46.67127733760509
},
{
"filename": "src/commands/create/create-sessions.module.ts",
"retrieved_chunk": "import { Module } from '@nestjs/common';\nimport { AccountsImportModule } from '../../modules/accounts-import/accounts-import.module';\nimport { ExportSessionsModule } from '../../modules/export-sessions/export-sessions.module';\nimport { ProxiesImportModule } from '../../modules/proxies-import/proxies-import.module';\nimport { ProxiesModule } from '../../modules/proxies/proxies.module';\nimport { SecretsImportModule } from '../../modules/secrets-import/secrets-import.module';\nimport { SteamTokensModule } from '../../modules/steam-tokens/steam-tokens.module';\nimport { CreateSessionsCommand } from './create-sessions.command';\nimport { CreateSessionsService } from './create-sessions.service';\n@Module({",
"score": 37.148105621581976
},
{
"filename": "src/modules/proxies-import/proxies-import.service.ts",
"retrieved_chunk": " if (!input) return [];\n if (!Array.isArray(input)) input = [input];\n if (input.length === 0) return [];\n let proxies: Proxy[] = [];\n const errors: string[] = [];\n const readResults = await Promise.all(input.map((input) => this.readProxyFromInput(input)));\n for (const result of readResults) {\n proxies.push(...result.values);\n errors.push(...result.errors);\n }",
"score": 34.566469198784716
}
] | typescript | this.accountsImportService.assignSecretsToAccounts(accounts, secrets); |
import crypto from "node:crypto";
import http from "node:http";
import https from "node:https";
import path from "node:path";
import { ServerResponse } from "node:http";
import { existsSync, readFileSync } from "node:fs";
import * as nanoid from "nanoid";
import {IncomingForm, Fields, Files} from "formidable";
import { MIME } from "./const";
export function sendJsonResponse(res: ServerResponse, error: object, status: number = 200) {
res.writeHead(status, {
"Content-type": "application/json",
"Access-Control-Allow-Origin": '*',
"Access-Control-Allow-Methods": 'GET, POST, PUT, DELETE',
"Access-Control-Allow-Headers": 'Content-type, authorization',
"Access-Control-Allow-Credentials": "true",
})
res.write(JSON.stringify(error), "utf-8");
}
export function sendEpubResponse(res: ServerResponse, epubBuffer: Buffer, code?: number) {
res.writeHead(code ?? 200, {
"Content-type": "application/epub+zip"
});
res.write(epubBuffer);
}
export function uuid(): string {
const nid = nanoid.customAlphabet("1234567890abcdef", 10);
let id = nid();
return id;
}
export async function getBufferFromRawURL(resourceUrl: string): Promise<Buffer | null> {
let url = new URL(resourceUrl);
try {
let buffArr: Buffer[] = await new Promise((resolve, reject) => {
let func = url.protocol === "https:" ? https : http;
func.get(url, (res) => {
let data: Buffer[] = [];
res.on("data", (d: Buffer) => data.push(d))
res.on("error", reject)
res.on("end", () => resolve(data))
})
})
let buffer = Buffer.concat(buffArr);
return buffer;
} catch (err) {
console.error(err);
return null;
}
}
export function sendHtmlResponse(res: ServerResponse, html: string, status: number = 200) {
res.writeHead(status, {
"Content-type": "text/html",
})
res.write(html, "utf-8");
}
export function parsePostData(req: http.IncomingMessage): Promise<Array<object>> {
let form = new IncomingForm({ multiples: false });
return new Promise((resolve, reject) => {
form.parse(req, (error, fields: Fields, files: Files) => {
if (error) reject(error);
resolve([fields, files]);
})
})
}
export function parseSimplePostData(req: http.IncomingMessage): Promise<Buffer> {
return new Promise((resolve, reject) => {
let data: Buffer[] = [];
req.on("data", (chunk: Buffer) => data.push(chunk))
req.on("end", () => {
const buf = Buffer.concat(data);
resolve(buf);
});
req.on("error", reject);
})
}
export function md5(data: string): string {
return crypto
.createHash("md5")
.update(data)
.digest("hex");
}
export function sendPublicFile(res: ServerResponse, filepath: string) {
let resourcePath = path.join(__dirname, "../../public", filepath)
if (!existsSync(resourcePath)) {
// we hope to handle the 404 state on the frontend
resourcePath = path.join(__dirname, "../../public", "index.html")
}
let ext = resourcePath.split('.').pop();
res. | writeHead(200, { "Content-type": MIME[ext] }); |
res.write(readFileSync(resourcePath))
}
| src/common/utils.ts | Aadv1k-quillia-52c5b34 | [
{
"filename": "src/common/const.ts",
"retrieved_chunk": "import dotenv from \"dotenv\";\nimport path from \"node:path\";\ndotenv.config({\n path: path.join(__dirname, \"../../.env\"),\n});\nexport const DB = {\n USER: process.env.PG_USER,\n PASSWORD: process.env.PG_PASSWORD,\n HOST: process.env.PG_HOST,\n PORT: parseInt(process.env.PG_PORT),",
"score": 19.895607556035262
},
{
"filename": "src/models/Bucket.ts",
"retrieved_chunk": " this.isLocal = false;\n if (!CLOUDINARY_CONF.API_SECRET) {\n this.isLocal = true;\n this.bucketPath = path.join(__dirname, \"../BUCKET\");\n } else {\n cloudinary.config({\n cloud_name: CLOUDINARY_CONF.CLOUD_NAME,\n api_key: CLOUDINARY_CONF.API_KEY,\n api_secret: CLOUDINARY_CONF.API_SECRET\n })",
"score": 18.153697700591987
},
{
"filename": "src/server.ts",
"retrieved_chunk": "import http from \"node:http\";\nimport { sendPublicFile } from \"./common/utils\";\nimport RouteSignup from \"./routes/Signup\"\nimport RouteLogin from \"./routes/Login\";\nimport RouteBooks from \"./routes/Books\";\nimport RouteIssue from \"./routes/Issue\";\nexport default http.createServer( async (req: http.IncomingMessage, res: http.ServerResponse) => {\n const url: string = new URL(`https://foo.com${req.url}`).pathname;\n if (url === \"/\") {\n sendPublicFile(res, \"index.html\");",
"score": 15.329193076718873
},
{
"filename": "src/models/Bucket.ts",
"retrieved_chunk": " let p = path.join(this.bucketPath, name);\n console.log(p);\n fs.writeFileSync(p, buffer);\n return p;\n }\n let response = new Promise((resolve, reject) => {\n const writeStream = this.bucket.uploader.upload_stream({\n public_id: name,\n resource_type: \"raw\",\n format: name.split('.').pop() // ideally \"unsafe\" files should not reach this point",
"score": 13.813616350208036
},
{
"filename": "src/common/const.ts",
"retrieved_chunk": " status: 401,\n error: \"unauthorized\"\n },\n userNotFound: {\n message: \"unable to find user\",\n status: 404,\n error: \"user-not-found\"\n },\n methodNotAllowed: {\n message: \"the method is not allowed for the endpoint\",",
"score": 13.453487930936673
}
] | typescript | writeHead(200, { "Content-type": MIME[ext] }); |
import pRetry from 'p-retry';
import { setTimeout as delay } from 'timers/promises';
import { Injectable, OnModuleInit } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { Account } from '../../interfaces/account.interface';
import { Session as ISession } from '../../interfaces/session.interface';
import { SteamTokensService } from '../../modules/steam-tokens/steam-tokens.service';
@Injectable()
export class CreateSessionsService implements OnModuleInit {
private schemaVersion: number;
constructor(private readonly steamTokensService: SteamTokensService, private readonly configService: ConfigService) {}
public onModuleInit() {
this.schemaVersion = this.configService.getOrThrow<number>('session.schemaVersion');
}
public async createSession(account: Account) {
try {
// we need to wait at least 30 seconds between each refresh token creation
// because steam has a limit of logins for one account once per 30 seconds
// probably it's fair only for accounts with 2FA enabled
const delayMs = 1000 * 31;
const desktopRefreshToken = await this.createRefreshToken(account, 'desktop');
await delay(delayMs);
const webRefreshToken = await this.createRefreshToken(account, 'web');
await delay(delayMs);
const mobileRefreshToken = await this.createRefreshToken(account, 'mobile');
await delay(delayMs);
const steamId = this.getSteamIdFromRefreshToken(webRefreshToken);
const schemaVersion = this.schemaVersion;
const session: ISession = {
username: account.username,
password: account.password,
sharedSecret: account.sharedSecret || null,
identitySecret: account.identitySecret || null,
steamId,
webRefreshToken,
mobileRefreshToken,
desktopRefreshToken,
schemaVersion,
};
return session;
} catch (error) {
throw new Error('Failed to create session', { cause: error });
}
}
private async createRefreshToken(account: Account, platform: 'web' | 'mobile' | 'desktop') {
try {
return await pRetry(() => this.steamTokensService.createRefreshToken(account, platform), {
retries: 3,
minTimeout: 31000,
maxTimeout: 31000,
});
} catch (error) {
throw new Error('Failed to create refresh token', { cause: error });
}
}
private getSteamIdFromRefreshToken(token: string) {
try {
const { sub: steamId | } = this.steamTokensService.decodeRefreshToken(token); |
if (!steamId) throw new Error('SteamId is missing from refresh token');
return steamId;
} catch (error) {
throw new Error('Failed to get steamId from refresh token', { cause: error });
}
}
}
| src/commands/create/create-sessions.service.ts | Sadzurami-steam-sessions-creator-97b7294 | [
{
"filename": "src/modules/steam-tokens/steam-tokens.service.ts",
"retrieved_chunk": " return JSON.parse(headerJson);\n } catch (error) {\n throw new Error('An error occurred while decoding refresh token', { cause: error });\n }\n }\n public validateRefreshToken(token: string) {\n try {\n const { iss, sub, exp, aud } = this.decodeRefreshToken(token);\n if (!iss || !sub || !exp || !aud) return false;\n if (iss !== 'steam') return false;",
"score": 41.655395973066426
},
{
"filename": "src/modules/steam-tokens/steam-tokens.service.ts",
"retrieved_chunk": " .then((result) => result.actionRequired && loginSession.emit('error', new Error('Guard action required')))\n .catch((error) => loginSession.emit('error', error));\n await pEvent(loginSession, 'authenticated', { rejectionEvents: ['error', 'timeout'], timeout: 35000 });\n const refreshToken = loginSession.refreshToken;\n if (!refreshToken) throw new Error('Refresh token is empty');\n return refreshToken;\n } catch (error) {\n if (error.eresult === EResult.RateLimitExceeded) this.throttleConnection(connectionId, 31 * 60 * 1000);\n throw new Error('Failed to create refresh token', { cause: error });\n } finally {",
"score": 32.689452475980794
},
{
"filename": "src/modules/steam-tokens/steam-tokens.service.ts",
"retrieved_chunk": " if (exp < Math.floor(Date.now() / 1000)) return false;\n if (!aud.includes('renew')) return false;\n return true;\n } catch (error) {\n return false;\n }\n }\n public getRefreshTokenExpiration(token: string) {\n try {\n const { exp } = this.decodeRefreshToken(token);",
"score": 25.693903591822522
},
{
"filename": "src/commands/validate/validate-sessions.service.ts",
"retrieved_chunk": " errors.push('Invalid mobile refresh token');\n }\n const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.mobileRefreshToken);\n if (tokenExpiration > expires) expires = tokenExpiration;\n }\n if (session.webRefreshToken) {\n if (!this.steamTokensService.validateRefreshToken(session.webRefreshToken)) {\n errors.push('Invalid web refresh token');\n }\n const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.webRefreshToken);",
"score": 24.912664998417785
},
{
"filename": "src/modules/steam-tokens/steam-tokens.service.ts",
"retrieved_chunk": " if (loginSession) loginSession.cancelLoginAttempt();\n }\n }\n public decodeRefreshToken(token: string) {\n try {\n const parts = token.split('.');\n if (parts.length !== 3) throw new Error('Invalid token');\n const headerBase64Url = parts[1];\n const headerBase64 = headerBase64Url.replace(/-/g, '+').replace(/_/g, '/');\n const headerJson = Buffer.from(headerBase64, 'base64').toString('utf-8');",
"score": 24.34304073894446
}
] | typescript | } = this.steamTokensService.decodeRefreshToken(token); |
import { Client } from "pg";
import { DB as DBConfig } from "../common/const";
import { Book } from "../common/types";
export default class BookModel {
private readonly client: Client;
constructor() {
this.client = new Client({
host: DBConfig.HOST,
user: DBConfig.USER,
password: DBConfig.PASSWORD,
database: DBConfig.DB_NAME,
port: DBConfig.PORT,
ssl: true
})
}
async init(): Promise<void> {
try {
await this.client.connect();
await this.client.query(`CREATE TABLE IF NOT EXISTS books (
id VARCHAR(255) UNIQUE NOT NULL,
userid VARCHAR(255) UNIQUE NOT NULL,
title VARCHAR(255) NOT NULL,
author VARCHAR(255) NOT NULL,
signature VARCHAR(255) NOT NULL,
path VARCHAR(255) NOT NULL,
cover VARCHAR(255) NOT NULL
)
`);
} catch (error) {
throw error;
}
}
async bookExists(bookid: string): Promise<boolean> {
const result = await this.client.query("SELECT EXISTS (SELECT 1 FROM books WHERE id = $1)", [bookid])
return result.rows[0].exists
}
| async getBooks(): Promise<Array<Book> | null> { |
try {
let response = await this.client.query("SELECT * FROM books");
return response.rows;
} catch (error) {
console.error(error);
return;
}
}
async pushBook(book: Book): Promise<Book | null> {
try {
await this.client.query(`
INSERT INTO books (id, userid, author, title, path, cover, signature)
VALUES ($1, $2, $3, $4, $5, $6, $7)`,
[book.id, book.userid, book.author, book.title, book.path, book?.cover ?? "", book.signature]
)
return book;
} catch (error) {
console.error(error);
return null;
}
}
async deleteBook(bookid: string, userid?: string) {
try {
await this.client.query(`DELETE FROM books WHERE id = $1 ${userid && "AND userid = $2"}`, [bookid, userid ?? ""]);
return bookid;
} catch (error) {
console.error(error);
return null;
}
}
async getBook(bookid: string, sig?: string): Promise<Book | null> {
try {
const response = await this.client.query(`SELECT * FROM books WHERE ${bookid ? "id = $1" : "signature = $1"}`, [bookid || sig]);
return response.rows[0];
} catch (error) {
console.error(error);
return null;
}
}
async updateBook() { /* TODO */ }
async close(): Promise<void> {
await this.client.end();
}
}
| src/models/BookModel.ts | Aadv1k-quillia-52c5b34 | [
{
"filename": "src/models/UserModel.ts",
"retrieved_chunk": " const result = await this.client.query(\"SELECT EXISTS (SELECT 1 FROM users WHERE email = $1)\", [email])\n return result.rows[0].exists\n } \n async getUserByID(id: string): Promise<User | null> {\n try {\n const response = await this.client.query(\n `SELECT * FROM users \n WHERE id = $1`, \n [id]\n );",
"score": 53.699621728019885
},
{
"filename": "src/models/IssueModel.ts",
"retrieved_chunk": " const result = await this.client.query(\"SELECT EXISTS (SELECT 1 FROM issues WHERE id = $1)\", [issueid])\n return result.rows[0].exists\n } \n async pushIssue(data: Issue): Promise<Issue | null> {\n try {\n await this.client.query(\n \"INSERT INTO issues (id, lenderid, borrowerid, bookid) VALUES ($1, $2, $3, $4)\",\n [data.id, data.lenderid, data.borrowerid, data.bookid]\n );\n return data;",
"score": 48.15824265239412
},
{
"filename": "src/models/UserModel.ts",
"retrieved_chunk": " return response.rows[0]\n } catch (error) {\n return null;\n }\n }\n async getUser(email: string, id?: string): Promise<User | null> {\n try {\n const response = await this.client.query(\n `SELECT * FROM users \n WHERE email = $1",
"score": 36.599205391025166
},
{
"filename": "src/models/IssueModel.ts",
"retrieved_chunk": " async getIssues(borrowerid: string): Promise<Array<Issue> | null> {\n try {\n let response = await this.client.query(\n \"SELECT * FROM issues WHERE borrowerid = $1\",\n [borrowerid]\n );\n return response.rows;\n } catch (error) {\n console.error(error);\n return null;",
"score": 34.70826396043995
},
{
"filename": "src/models/IssueModel.ts",
"retrieved_chunk": " }\n }\n async getIssue(\n lenderid: string,\n bookid?: string,\n borrowerid?: string,\n ): Promise<Issue | null> {\n try {\n let response = await this.client.query(\n `SELECT * FROM issues ",
"score": 28.815907840915806
}
] | typescript | async getBooks(): Promise<Array<Book> | null> { |
import pRetry from 'p-retry';
import { setTimeout as delay } from 'timers/promises';
import { Injectable, OnModuleInit } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { Account } from '../../interfaces/account.interface';
import { Session as ISession } from '../../interfaces/session.interface';
import { SteamTokensService } from '../../modules/steam-tokens/steam-tokens.service';
@Injectable()
export class CreateSessionsService implements OnModuleInit {
private schemaVersion: number;
constructor(private readonly steamTokensService: SteamTokensService, private readonly configService: ConfigService) {}
public onModuleInit() {
this.schemaVersion = this.configService.getOrThrow<number>('session.schemaVersion');
}
public async createSession(account: Account) {
try {
// we need to wait at least 30 seconds between each refresh token creation
// because steam has a limit of logins for one account once per 30 seconds
// probably it's fair only for accounts with 2FA enabled
const delayMs = 1000 * 31;
const desktopRefreshToken = await this.createRefreshToken(account, 'desktop');
await delay(delayMs);
const webRefreshToken = await this.createRefreshToken(account, 'web');
await delay(delayMs);
const mobileRefreshToken = await this.createRefreshToken(account, 'mobile');
await delay(delayMs);
const steamId = this.getSteamIdFromRefreshToken(webRefreshToken);
const schemaVersion = this.schemaVersion;
const session: ISession = {
username: account.username,
password: account.password,
sharedSecret: account.sharedSecret || null,
identitySecret: account.identitySecret || null,
steamId,
webRefreshToken,
mobileRefreshToken,
desktopRefreshToken,
schemaVersion,
};
return session;
} catch (error) {
throw new Error('Failed to create session', { cause: error });
}
}
private async createRefreshToken(account: Account, platform: 'web' | 'mobile' | 'desktop') {
try {
return await pRetry(( | ) => this.steamTokensService.createRefreshToken(account, platform), { |
retries: 3,
minTimeout: 31000,
maxTimeout: 31000,
});
} catch (error) {
throw new Error('Failed to create refresh token', { cause: error });
}
}
private getSteamIdFromRefreshToken(token: string) {
try {
const { sub: steamId } = this.steamTokensService.decodeRefreshToken(token);
if (!steamId) throw new Error('SteamId is missing from refresh token');
return steamId;
} catch (error) {
throw new Error('Failed to get steamId from refresh token', { cause: error });
}
}
}
| src/commands/create/create-sessions.service.ts | Sadzurami-steam-sessions-creator-97b7294 | [
{
"filename": "src/modules/steam-tokens/steam-tokens.service.ts",
"retrieved_chunk": " return exp * 1000;\n } catch (error) {\n return 0;\n }\n }\n private inferLoginSessionPlatform(platform: 'web' | 'mobile' | 'desktop'): EAuthTokenPlatformType {\n if (platform === 'web') return EAuthTokenPlatformType.WebBrowser;\n else if (platform === 'mobile') return EAuthTokenPlatformType.MobileApp;\n else if (platform === 'desktop') return EAuthTokenPlatformType.SteamClient;\n else throw new Error('Invalid platform');",
"score": 36.97474362390261
},
{
"filename": "src/modules/steam-tokens/steam-tokens.service.ts",
"retrieved_chunk": " private readonly connectionThrottlingTimeout = 31 * 1000;\n constructor(\n @Inject(CACHE_MANAGER) private throttledConnections: Cache,\n private readonly proxiesService: ProxiesService,\n ) {}\n public async createRefreshToken(account: Account, platform: 'web' | 'mobile' | 'desktop') {\n const loginSessionPlatform = this.inferLoginSessionPlatform(platform);\n const proxy = await this.proxiesService.getProxy();\n const connectionId = this.inferConnectionId((proxy || '').toString());\n await this.waitConnectionLimitReset(connectionId);",
"score": 36.95452169791945
},
{
"filename": "src/modules/export-sessions/export-sessions.service.ts",
"retrieved_chunk": " const serializedSession = this.serializeSession(session);\n const sessionPath = path.resolve(this.outputPath, `${session.username}.${this.fileExtension}`);\n try {\n await fs.writeFile(sessionPath, serializedSession);\n } catch (error) {\n throw new Error('Failed to write session to file', { cause: error });\n }\n }\n private serializeSession(session: Session) {\n const serializedObject = Object.fromEntries(",
"score": 26.10530935036252
},
{
"filename": "src/modules/export-sessions/export-sessions.service.ts",
"retrieved_chunk": " if (!directory || typeof directory !== 'string') throw new Error('Invalid output path');\n if (!path.isAbsolute(directory)) throw new Error('Output path must be absolute');\n try {\n await fs.mkdir(directory, { recursive: true });\n } catch (error) {\n throw new Error('Failed to create output directory', { cause: error });\n }\n this.outputPath = directory;\n }\n public async exportSession(session: Session) {",
"score": 25.134891424804334
},
{
"filename": "src/modules/steam-tokens/steam-tokens.service.ts",
"retrieved_chunk": " .then((result) => result.actionRequired && loginSession.emit('error', new Error('Guard action required')))\n .catch((error) => loginSession.emit('error', error));\n await pEvent(loginSession, 'authenticated', { rejectionEvents: ['error', 'timeout'], timeout: 35000 });\n const refreshToken = loginSession.refreshToken;\n if (!refreshToken) throw new Error('Refresh token is empty');\n return refreshToken;\n } catch (error) {\n if (error.eresult === EResult.RateLimitExceeded) this.throttleConnection(connectionId, 31 * 60 * 1000);\n throw new Error('Failed to create refresh token', { cause: error });\n } finally {",
"score": 20.103858217172387
}
] | typescript | ) => this.steamTokensService.createRefreshToken(account, platform), { |
import BookModel from "../models/BookModel";
import Bucket from "../models/Bucket";
import Token from "../lib/GenerateToken";
import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const";
import { TokStatus, Book } from "../common/types";
import {
sendJsonResponse,
parseSimplePostData,
md5,
uuid,
} from "../common/utils";
import filetype from "file-type-cjs";
import fs from "node:fs";
import EPub from "epub";
import os from "node:os";
import path from "node:path";
import crypto from "node:crypto";
import { exec } from "node:child_process";
import http from "node:http";
async function getEpubCoverFromEpubFile_UNIX(
epubFilepath: string
): Promise<[Buffer, string] | null> {
let randomString = crypto.randomBytes(16).toString("hex");
let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`);
fs.mkdirSync(tempDir);
let unzipCMD = `unzip -q ${epubFilepath} -d ${tempDir}`;
let unzipCMDExec = new Promise((resolve, reject) => {
exec(unzipCMD, (err: any, stdout: any, stderr: any) => {
if (err) reject(err);
resolve(stdout);
});
});
try {
await unzipCMDExec;
} catch (err) {
console.error(err);
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return null;
}
let findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`;
let findCMDExec: Promise<string> = new Promise((resolve, reject) => {
exec(findCMD, (err: any, stdout: any, stderr: any) => {
if (err) reject(err);
resolve(stdout);
});
});
let selectedFilePath: string;
try {
selectedFilePath = await findCMDExec;
selectedFilePath = selectedFilePath.trim();
} catch (err) {
console.error(err);
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return null;
}
let ret: [Buffer, string] = [
Buffer.from(fs.readFileSync(selectedFilePath)),
selectedFilePath,
];
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return ret;
}
export default async function (
req: http.IncomingMessage,
res: http.ServerResponse
) {
const BOOK_DB = new BookModel();
const BUCKET | = new Bucket(); |
await BOOK_DB.init();
await BUCKET.init();
try {
if (req.method === "GET") {
try {
let userBooks = await BOOK_DB.getBooks();
userBooks = userBooks.map((e) => {
delete e.path;
return e;
});
sendJsonResponse(res, userBooks, 200);
} catch (error) {
console.error(error);
sendJsonResponse(res, ERROR.internalErr);
}
} else if (req.method === "POST") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let epubBuffer: Buffer;
epubBuffer = await parseSimplePostData(req);
let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6);
let bufferMime = await filetype.fromBuffer(epubBuffer);
if (bufferMime.mime != "application/epub+zip") {
sendJsonResponse(res, ERROR.invalidMimeForResource, 415);
return;
}
if (epubSizeInMB > MAX_EPUB_SIZE_MB) {
sendJsonResponse(res, ERROR.fileTooLarge, 400);
return;
}
let randomString = crypto.randomBytes(16).toString("hex");
const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`);
fs.writeFileSync(tempEpubFilePath, epubBuffer);
const epub: any = await new Promise((resolve, reject) => {
const epub = new EPub(tempEpubFilePath);
epub.on("end", () => resolve(epub));
epub.on("error", reject);
epub.parse();
});
let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath);
console.log(epubCoverBuffer);
let epubSignature = md5(epubBuffer.toString("hex"));
let foundBook = await BOOK_DB.getBook("", epubSignature);
if (foundBook) {
sendJsonResponse(
res,
{
...ERROR.resourceExists,
data: {
id: foundBook.id,
},
},
409
);
return;
}
let epubFilePermalink = await BUCKET.pushBufferWithName(
epubBuffer,
`${epubSignature}.epub`
);
let epubCoverPermalink = null;
if (epubCoverBuffer) {
epubCoverPermalink = await BUCKET.pushBufferWithName(
epubCoverBuffer[0],
`${epubSignature}.${epubCoverBuffer[1].split(".").pop()}`
);
}
let epubID = uuid();
let epubEntry: Book = {
id: epubID,
userid: parsedAuthToken.id,
title: epub.metadata?.title ?? epubID.split("-").pop(),
author: epub.metadata?.creator ?? parsedAuthToken.email,
path: epubFilePermalink,
signature: epubSignature,
cover: epubCoverPermalink,
};
const pushed = await BOOK_DB.pushBook(epubEntry);
if (!pushed) {
sendJsonResponse(res, ERROR.internalErr, 500);
return;
}
sendJsonResponse(
res,
{
error: null,
message: `successfully published a book of id ${epubEntry.id}`,
data: {
id: epubEntry.id,
},
},
201
);
} else if (req.method === "DELETE") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let body: Buffer;
body = await parseSimplePostData(req);
let data: any;
try {
data = JSON.parse(body.toString());
} catch {
sendJsonResponse(res, ERROR.invalidJSONData, 400);
return;
}
if (!data.bookid) {
sendJsonResponse(res, ERROR.badRequest, 400);
return;
}
let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id);
if (!bookDeleted) {
sendJsonResponse(res, {
error: "unable-to-delete-book",
message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`,
status: 404
}, 404)
return;
}
sendJsonResponse(res, {
error: null,
message: `successfully deleted book of id ${data.bookid}`,
status: 204,
data: {
id: data.bookid,
}
}, 204)
}
} finally {
await BOOK_DB.close();
}
}
| src/routes/Books.ts | Aadv1k-quillia-52c5b34 | [
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " uuid,\n getBufferFromRawURL,\n} from \"../common/utils\";\nimport http from \"node:http\";\nimport https from \"node:https\";\nexport default async function (\n req: http.IncomingMessage,\n res: http.ServerResponse\n) {\n const ISSUE_DB = new IssueModel();",
"score": 24.80824831581957
},
{
"filename": "src/routes/Login.ts",
"retrieved_chunk": "import http from \"node:http\";\nimport { sendJsonResponse, md5, parseSimplePostData } from \"../common/utils\";\nimport Token from \"../lib/GenerateToken\";\nimport { ERROR } from \"../common/const\";\nimport UserModel from \"../models/UserModel\";\nimport { User } from \"../common/types\";\nexport default async function (\n req: http.IncomingMessage,\n res: http.ServerResponse\n) {",
"score": 19.15940983223921
},
{
"filename": "src/server.ts",
"retrieved_chunk": "import http from \"node:http\";\nimport { sendPublicFile } from \"./common/utils\";\nimport RouteSignup from \"./routes/Signup\"\nimport RouteLogin from \"./routes/Login\";\nimport RouteBooks from \"./routes/Books\";\nimport RouteIssue from \"./routes/Issue\";\nexport default http.createServer( async (req: http.IncomingMessage, res: http.ServerResponse) => {\n const url: string = new URL(`https://foo.com${req.url}`).pathname;\n if (url === \"/\") {\n sendPublicFile(res, \"index.html\");",
"score": 18.69101416035368
},
{
"filename": "src/routes/Signup.ts",
"retrieved_chunk": "import http from \"node:http\";\nimport { sendJsonResponse, md5, uuid, parseSimplePostData } from \"../common/utils\";\nimport { ERROR } from \"../common/const\";\nimport { User } from \"../common/types\";\nimport UserModel from \"../models/UserModel\";\nimport Token from \"../lib/GenerateToken\";\nimport isEmailValid from \"../lib/isEmailValid\";\nexport default async function (\n req: http.IncomingMessage,\n res: http.ServerResponse",
"score": 18.06332426978546
},
{
"filename": "src/common/utils.ts",
"retrieved_chunk": " resolve([fields, files]);\n })\n })\n}\nexport function parseSimplePostData(req: http.IncomingMessage): Promise<Buffer> {\n return new Promise((resolve, reject) => {\n let data: Buffer[] = [];\n req.on(\"data\", (chunk: Buffer) => data.push(chunk))\n req.on(\"end\", () => { \n const buf = Buffer.concat(data);",
"score": 15.280106016816301
}
] | typescript | = new Bucket(); |
import { Client } from "pg";
import { DB as DBConfig } from "../common/const";
import { Issue } from "../common/types";
export default class IssueModel {
private readonly client: Client;
constructor() {
this.client = new Client({
host: DBConfig.HOST,
user: DBConfig.USER,
password: DBConfig.PASSWORD,
database: DBConfig.DB_NAME,
port: DBConfig.PORT,
ssl: true,
});
}
async init(): Promise<void> {
try {
await this.client.connect();
await this.client.query(`CREATE TABLE IF NOT EXISTS issues (
id VARCHAR(255) UNIQUE NOT NULL,
lenderid VARCHAR(255) NOT NULL,
borrowerid VARCHAR(255) NOT NULL,
bookid VARCHAR(255) NOT NULL
)
`);
} catch (error) {
throw error;
}
}
async issueExists(issueid: string): Promise<boolean> {
const result = await this.client.query("SELECT EXISTS (SELECT 1 FROM issues WHERE id = $1)", [issueid])
return result.rows[0].exists
}
async pushIssue | (data: Issue): Promise<Issue | null> { |
try {
await this.client.query(
"INSERT INTO issues (id, lenderid, borrowerid, bookid) VALUES ($1, $2, $3, $4)",
[data.id, data.lenderid, data.borrowerid, data.bookid]
);
return data;
} catch (error) {
console.error(error);
return null;
}
}
async removeIssue(
issueid: string,
borrowerid?: string,
lenderid?: string
): Promise<void | null> {
try {
await this.client.query(
"DELETE FROM issues WHERE issueid = $1 OR borrowerid = $2 OR lenderid = $3",
[issueid ?? "", borrowerid ?? "", lenderid ?? ""]
);
} catch (error) {
console.error(error);
return null;
}
}
async getIssues(borrowerid: string): Promise<Array<Issue> | null> {
try {
let response = await this.client.query(
"SELECT * FROM issues WHERE borrowerid = $1",
[borrowerid]
);
return response.rows;
} catch (error) {
console.error(error);
return null;
}
}
async getIssue(
lenderid: string,
bookid?: string,
borrowerid?: string,
): Promise<Issue | null> {
try {
let response = await this.client.query(
`SELECT * FROM issues
WHERE borrowerid = $1
AND bookid = $2
`,
[borrowerid ?? null, bookid ?? null]
);
return response.rows[0];
} catch (error) {
console.error(error);
return null;
}
}
async close(): Promise<void> {
await this.client.end();
}
}
| src/models/IssueModel.ts | Aadv1k-quillia-52c5b34 | [
{
"filename": "src/models/BookModel.ts",
"retrieved_chunk": " }\n }\n async bookExists(bookid: string): Promise<boolean> {\n const result = await this.client.query(\"SELECT EXISTS (SELECT 1 FROM books WHERE id = $1)\", [bookid])\n return result.rows[0].exists\n } \n async getBooks(): Promise<Array<Book> | null> {\n try {\n let response = await this.client.query(\"SELECT * FROM books\");\n return response.rows;",
"score": 57.224394669493165
},
{
"filename": "src/models/UserModel.ts",
"retrieved_chunk": " const result = await this.client.query(\"SELECT EXISTS (SELECT 1 FROM users WHERE email = $1)\", [email])\n return result.rows[0].exists\n } \n async getUserByID(id: string): Promise<User | null> {\n try {\n const response = await this.client.query(\n `SELECT * FROM users \n WHERE id = $1`, \n [id]\n );",
"score": 56.324781760242765
},
{
"filename": "src/models/UserModel.ts",
"retrieved_chunk": " return response.rows[0]\n } catch (error) {\n return null;\n }\n }\n async getUser(email: string, id?: string): Promise<User | null> {\n try {\n const response = await this.client.query(\n `SELECT * FROM users \n WHERE email = $1",
"score": 39.35793256405894
},
{
"filename": "src/models/BookModel.ts",
"retrieved_chunk": " await this.client.query(`DELETE FROM books WHERE id = $1 ${userid && \"AND userid = $2\"}`, [bookid, userid ?? \"\"]);\n return bookid;\n } catch (error) {\n console.error(error); \n return null;\n }\n }\n async getBook(bookid: string, sig?: string): Promise<Book | null> {\n try {\n const response = await this.client.query(`SELECT * FROM books WHERE ${bookid ? \"id = $1\" : \"signature = $1\"}`, [bookid || sig]);",
"score": 34.028853557645164
},
{
"filename": "src/models/UserModel.ts",
"retrieved_chunk": " let data = await this.client.query(`SELECT * FROM users`)\n return data.rows;\n } catch (error) {\n console.error(error);\n return null;\n }\n }\n async updateUser(user: User, toUpdate: object ) {\n let blobs = Object.keys(toUpdate).map((e, i) => {return `${e} = \\$${i+1}`})\n let toUpdateQuery = blobs.join(\" AND \");",
"score": 26.41921940834653
}
] | typescript | (data: Issue): Promise<Issue | null> { |
import BookModel from "../models/BookModel";
import Bucket from "../models/Bucket";
import Token from "../lib/GenerateToken";
import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const";
import { TokStatus, Book } from "../common/types";
import {
sendJsonResponse,
parseSimplePostData,
md5,
uuid,
} from "../common/utils";
import filetype from "file-type-cjs";
import fs from "node:fs";
import EPub from "epub";
import os from "node:os";
import path from "node:path";
import crypto from "node:crypto";
import { exec } from "node:child_process";
import http from "node:http";
async function getEpubCoverFromEpubFile_UNIX(
epubFilepath: string
): Promise<[Buffer, string] | null> {
let randomString = crypto.randomBytes(16).toString("hex");
let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`);
fs.mkdirSync(tempDir);
let unzipCMD = `unzip -q ${epubFilepath} -d ${tempDir}`;
let unzipCMDExec = new Promise((resolve, reject) => {
exec(unzipCMD, (err: any, stdout: any, stderr: any) => {
if (err) reject(err);
resolve(stdout);
});
});
try {
await unzipCMDExec;
} catch (err) {
console.error(err);
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return null;
}
let findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`;
let findCMDExec: Promise<string> = new Promise((resolve, reject) => {
exec(findCMD, (err: any, stdout: any, stderr: any) => {
if (err) reject(err);
resolve(stdout);
});
});
let selectedFilePath: string;
try {
selectedFilePath = await findCMDExec;
selectedFilePath = selectedFilePath.trim();
} catch (err) {
console.error(err);
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return null;
}
let ret: [Buffer, string] = [
Buffer.from(fs.readFileSync(selectedFilePath)),
selectedFilePath,
];
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return ret;
}
export default async function (
req: http.IncomingMessage,
res: http.ServerResponse
) {
const BOOK_DB = new BookModel();
const BUCKET = new Bucket();
await BOOK_DB.init();
await BUCKET.init();
try {
if (req.method === "GET") {
try {
let userBooks = await BOOK_DB.getBooks();
userBooks = userBooks.map((e) => {
delete e.path;
return e;
});
sendJsonResponse(res, userBooks, 200);
} catch (error) {
console.error(error);
sendJsonResponse( | res, ERROR.internalErr); |
}
} else if (req.method === "POST") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let epubBuffer: Buffer;
epubBuffer = await parseSimplePostData(req);
let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6);
let bufferMime = await filetype.fromBuffer(epubBuffer);
if (bufferMime.mime != "application/epub+zip") {
sendJsonResponse(res, ERROR.invalidMimeForResource, 415);
return;
}
if (epubSizeInMB > MAX_EPUB_SIZE_MB) {
sendJsonResponse(res, ERROR.fileTooLarge, 400);
return;
}
let randomString = crypto.randomBytes(16).toString("hex");
const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`);
fs.writeFileSync(tempEpubFilePath, epubBuffer);
const epub: any = await new Promise((resolve, reject) => {
const epub = new EPub(tempEpubFilePath);
epub.on("end", () => resolve(epub));
epub.on("error", reject);
epub.parse();
});
let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath);
console.log(epubCoverBuffer);
let epubSignature = md5(epubBuffer.toString("hex"));
let foundBook = await BOOK_DB.getBook("", epubSignature);
if (foundBook) {
sendJsonResponse(
res,
{
...ERROR.resourceExists,
data: {
id: foundBook.id,
},
},
409
);
return;
}
let epubFilePermalink = await BUCKET.pushBufferWithName(
epubBuffer,
`${epubSignature}.epub`
);
let epubCoverPermalink = null;
if (epubCoverBuffer) {
epubCoverPermalink = await BUCKET.pushBufferWithName(
epubCoverBuffer[0],
`${epubSignature}.${epubCoverBuffer[1].split(".").pop()}`
);
}
let epubID = uuid();
let epubEntry: Book = {
id: epubID,
userid: parsedAuthToken.id,
title: epub.metadata?.title ?? epubID.split("-").pop(),
author: epub.metadata?.creator ?? parsedAuthToken.email,
path: epubFilePermalink,
signature: epubSignature,
cover: epubCoverPermalink,
};
const pushed = await BOOK_DB.pushBook(epubEntry);
if (!pushed) {
sendJsonResponse(res, ERROR.internalErr, 500);
return;
}
sendJsonResponse(
res,
{
error: null,
message: `successfully published a book of id ${epubEntry.id}`,
data: {
id: epubEntry.id,
},
},
201
);
} else if (req.method === "DELETE") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let body: Buffer;
body = await parseSimplePostData(req);
let data: any;
try {
data = JSON.parse(body.toString());
} catch {
sendJsonResponse(res, ERROR.invalidJSONData, 400);
return;
}
if (!data.bookid) {
sendJsonResponse(res, ERROR.badRequest, 400);
return;
}
let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id);
if (!bookDeleted) {
sendJsonResponse(res, {
error: "unable-to-delete-book",
message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`,
status: 404
}, 404)
return;
}
sendJsonResponse(res, {
error: null,
message: `successfully deleted book of id ${data.bookid}`,
status: 204,
data: {
id: data.bookid,
}
}, 204)
}
} finally {
await BOOK_DB.close();
}
}
| src/routes/Books.ts | Aadv1k-quillia-52c5b34 | [
{
"filename": "src/models/UserModel.ts",
"retrieved_chunk": " let data = await this.client.query(`SELECT * FROM users`)\n return data.rows;\n } catch (error) {\n console.error(error);\n return null;\n }\n }\n async updateUser(user: User, toUpdate: object ) {\n let blobs = Object.keys(toUpdate).map((e, i) => {return `${e} = \\$${i+1}`})\n let toUpdateQuery = blobs.join(\" AND \");",
"score": 25.23003042717977
},
{
"filename": "src/routes/Signup.ts",
"retrieved_chunk": " sendJsonResponse(res, ERROR.internalErr, 500);\n }\n await DB.close();\n}",
"score": 13.768160053850377
},
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " sendJsonResponse(res, ERROR.badRequest, 400);\n return;\n }\n let foundLender = await USER_DB.getUserByID(issueData.lenderid);\n let foundBook = await BOOK_DB.getBook(issueData.bookid);\n if (!foundLender || !foundBook) {\n sendJsonResponse(res, ERROR.resourceNotExists, 404);\n return;\n }\n let foundIssue = await ISSUE_DB.getIssue(",
"score": 12.537334188913514
},
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " let issueData: Issue;\n try {\n let issuePostData = await parseSimplePostData(req);\n issueData = JSON.parse(issuePostData.toString());\n } catch (error) {\n console.error(error);\n sendJsonResponse(res, ERROR.badRequest, 400);\n return;\n }\n if (!issueData.lenderid || !issueData.bookid) {",
"score": 11.83320790953767
},
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " let targetBook = await BOOK_DB.getBook(requestedBook);\n if (!targetBook) {\n sendJsonResponse(res, ERROR.resourceNotExists, 404);\n return;\n }\n let epubResourcePath = targetBook.path;\n const response: Array<Buffer> = await new Promise((resolve, reject) => {\n https.get(epubResourcePath, (res) => {\n let data: Array<Buffer> = [];\n res.on(\"data\", (d: Buffer) => data.push(d));",
"score": 11.683129652628725
}
] | typescript | res, ERROR.internalErr); |
import { setTimeout as delay } from 'timers/promises';
import { Injectable, Logger } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { Session } from '../../interfaces/session.interface';
import { SteamTokensService } from '../../modules/steam-tokens/steam-tokens.service';
@Injectable()
export class ValidateSessionsService {
private readonly logger = new Logger(ValidateSessionsService.name);
constructor(private readonly steamTokensService: SteamTokensService, private readonly configService: ConfigService) {}
public async validateSessions(sessions: Session[]) {
const valid: Session[] = [];
const invalid: Session[] = [];
for (const session of sessions) {
const { valid: isValid, errors, expires } = await this.validateSession(session);
if (isValid) {
valid.push(session);
this.logger.log(
`Valid: ${session.username}, days: ${Math.floor((expires - Date.now()) / (24 * 60 * 60 * 1000))}`,
);
} else {
invalid.push(session);
this.logger.warn(`Invalid: ${session.username}, errors: ${errors.join(', ')}`);
}
}
if (invalid.length > 0) {
this.logger.warn(`Invalid sessions:\n${invalid.map((session) => session.username).join('\n')}`);
}
await delay(1000);
}
private async validateSession(session: Session) {
const errors: string[] = [];
let expires = Date.now();
if (!session) errors.push('Invalid session');
if (session.schemaVersion !== this.configService.getOrThrow<number>('session.schemaVersion')) {
errors.push('Outdated schema version');
}
if (!session.username) errors.push('Invalid username');
if (!session.password) errors.push('Invalid password');
if (!session.steamId) errors.push('Invalid steamId');
if (!session.hasOwnProperty('sharedSecret')) errors.push('Invalid shared Secret');
if (!session.hasOwnProperty('identitySecret')) errors.push('Invalid identity Secret');
| if (session.desktopRefreshToken) { |
if (!this.steamTokensService.validateRefreshToken(session.desktopRefreshToken)) {
errors.push('Invalid desktop refresh token');
}
const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.desktopRefreshToken);
if (tokenExpiration > expires) expires = tokenExpiration;
}
if (session.mobileRefreshToken) {
if (!this.steamTokensService.validateRefreshToken(session.mobileRefreshToken)) {
errors.push('Invalid mobile refresh token');
}
const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.mobileRefreshToken);
if (tokenExpiration > expires) expires = tokenExpiration;
}
if (session.webRefreshToken) {
if (!this.steamTokensService.validateRefreshToken(session.webRefreshToken)) {
errors.push('Invalid web refresh token');
}
const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.webRefreshToken);
if (tokenExpiration > expires) expires = tokenExpiration;
}
if (expires < Date.now()) errors.push('Expired session');
return { valid: errors.length === 0, errors, expires };
}
}
| src/commands/validate/validate-sessions.service.ts | Sadzurami-steam-sessions-creator-97b7294 | [
{
"filename": "src/modules/sessions-import/sessions-import.service.ts",
"retrieved_chunk": " content = JSON.parse(content);\n if (content == null || typeof content !== 'object' || Array.isArray(content)) {\n throw new Error('Invalid session file');\n }\n const session = Object.fromEntries(\n Object.entries(content).map(([key, value]) => [key[0].toLowerCase() + key.slice(1), value]),\n ) as unknown as Session;\n result.values.push(session);\n } catch (error) {\n result.errors.push(filePath);",
"score": 61.270322341293195
},
{
"filename": "src/modules/accounts-import/accounts-import.service.ts",
"retrieved_chunk": " const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };\n try {\n const { SteamLogin: username, SteamPassword: password } = JSON.parse(fileContent);\n if (!username) throw new Error('Invalid username');\n if (!password) throw new Error('Invalid password');\n const account = new Account(`${username}:${password}`);\n result.values.push(account);\n } catch (error) {\n result.errors.push(fileContent);\n }",
"score": 59.229422662334315
},
{
"filename": "src/commands/create/create-sessions.service.ts",
"retrieved_chunk": " password: account.password,\n sharedSecret: account.sharedSecret || null,\n identitySecret: account.identitySecret || null,\n steamId,\n webRefreshToken,\n mobileRefreshToken,\n desktopRefreshToken,\n schemaVersion,\n };\n return session;",
"score": 58.91280744274427
},
{
"filename": "src/modules/accounts-import/accounts-import.service.ts",
"retrieved_chunk": " return result;\n }\n private readAccountFromSessionFile(fileContent: string) {\n const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };\n try {\n const { Username, Password, SharedSecret, IdentitySecret } = JSON.parse(fileContent);\n if (!Username) throw new Error('Invalid username');\n if (!Password) throw new Error('Invalid password');\n const account = new Account(`${Username}:${Password}:${SharedSecret || ''}:${IdentitySecret || ''}`);\n result.values.push(account);",
"score": 46.074953191494835
},
{
"filename": "src/modules/accounts-import/accounts-import.service.ts",
"retrieved_chunk": " const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };\n try {\n let content = await this.readFilesQueue.add(() => fs.readFile(filePath, 'utf-8'));\n content = content.trim();\n if (content.length === 0) throw new Error('Empty file');\n // session file\n if (filePath.endsWith('.steamsession')) {\n const readResults = this.readAccountFromSessionFile(content);\n result.values.push(...readResults.values);\n if (readResults.errors.length > 0) result.errors.push(filePath);",
"score": 43.897743302543034
}
] | typescript | if (session.desktopRefreshToken) { |
import { TokStatus } from "../common/types";
import { JWT } from "../common/const"
import { createHmac } from "node:crypto";
export default class Token {
generate(user: object, expiresIn?: number): string {
const head = { algorithm: JWT.ALGO, typ: "JWT"};
const createdAt = Math.floor(Date.now() / 1000);
const body = { ...user, iat: createdAt, exp: null}
if (expiresIn) {
body.exp = createdAt + expiresIn;
}
let b64Head = Buffer.from(JSON.stringify(head)).toString("base64").replace(/=/g, "");
let b64Body = Buffer.from(JSON.stringify(body)).toString("base64").replace(/=/g, "");
let signature = this.sign(`${b64Head}.${b64Body}`);
return `${b64Head}.${b64Body}.${signature}`
}
verify(token: string): TokStatus {
let [head, body, signature] = token.split('.');
if (!head || !body || !signature) {
return TokStatus.INVALID;
}
if (this.sign(`${head}.${body}`) !== signature) {
return TokStatus.INVALID_SIG
}
let decodedBody = Buffer.from(body, "base64").toString("utf-8");
const curTime = Math.floor(Date.now() / 1000);
if (JSON.parse(decodedBody)?.exp > curTime) {
return TokStatus.EXPIRED;
}
return TokStatus.VALID
}
// assumes that the token is valid
UNSAFE_parse(token: string): object {
const [ _a, body, _b ] = token.split(".");
const parsedBody = Buffer.from(body, "base64").toString("utf-8");
const parsedJson = JSON.parse(parsedBody);
return parsedJson;
}
private sign(data: string): string {
return createHmac(JWT. | HASH, JWT.SECRET)
.update(data)
.digest("base64")
.replace(/=/g, '')
} |
}
| src/lib/GenerateToken.ts | Aadv1k-quillia-52c5b34 | [
{
"filename": "src/common/utils.ts",
"retrieved_chunk": " resolve(buf);\n });\n req.on(\"error\", reject);\n })\n}\nexport function md5(data: string): string {\n return crypto\n .createHash(\"md5\")\n .update(data)\n .digest(\"hex\");",
"score": 18.54082193265969
},
{
"filename": "src/common/const.ts",
"retrieved_chunk": " DB_NAME: process.env.PG_DB,\n}\nexport const CLOUDINARY_CONF = {\n API_KEY: process.env.CLOUDINARY_API_KEY,\n API_SECRET: process.env.CLOUDINARY_API_SECRET,\n CLOUD_NAME: \"dbloby3uq\",\n}\nexport const JWT = {\n SECRET: process.env.JWT_SECRET ?? \"default\",\n ALGO: \"HS256\",",
"score": 14.30128348756572
},
{
"filename": "src/routes/Login.ts",
"retrieved_chunk": " const DB = new UserModel();\n let data: any = await parseSimplePostData(req);\n data = data.toString();\n if (req.method !== \"POST\") {\n sendJsonResponse(res, ERROR.methodNotAllowed, 405);\n return;\n }\n let parsedData: User;\n try {\n parsedData = JSON.parse(data);",
"score": 11.127558193543766
},
{
"filename": "src/routes/Books.ts",
"retrieved_chunk": " }\n const parsedAuthToken: any = token.UNSAFE_parse(authToken);\n let body: Buffer;\n body = await parseSimplePostData(req);\n let data: any;\n try {\n data = JSON.parse(body.toString());\n } catch {\n sendJsonResponse(res, ERROR.invalidJSONData, 400);\n return;",
"score": 10.287652325137763
},
{
"filename": "src/routes/Signup.ts",
"retrieved_chunk": " parsedData = JSON.parse(data === \"\" ? '{}' : data);\n } catch {\n sendJsonResponse(res, ERROR.invalidJSONData, 400)\n return;\n }\n if (!parsedData.email || !parsedData.password) {\n sendJsonResponse(res, ERROR.badRequest, 400);\n return;\n }\n if (!isEmailValid(parsedData.email)) {",
"score": 10.054165450579347
}
] | typescript | HASH, JWT.SECRET)
.update(data)
.digest("base64")
.replace(/=/g, '')
} |
import { setTimeout as delay } from 'timers/promises';
import { Injectable, Logger } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { Session } from '../../interfaces/session.interface';
import { SteamTokensService } from '../../modules/steam-tokens/steam-tokens.service';
@Injectable()
export class ValidateSessionsService {
private readonly logger = new Logger(ValidateSessionsService.name);
constructor(private readonly steamTokensService: SteamTokensService, private readonly configService: ConfigService) {}
public async validateSessions(sessions: Session[]) {
const valid: Session[] = [];
const invalid: Session[] = [];
for (const session of sessions) {
const { valid: isValid, errors, expires } = await this.validateSession(session);
if (isValid) {
valid.push(session);
this.logger.log(
`Valid: ${session.username}, days: ${Math.floor((expires - Date.now()) / (24 * 60 * 60 * 1000))}`,
);
} else {
invalid.push(session);
this.logger.warn(`Invalid: ${session.username}, errors: ${errors.join(', ')}`);
}
}
if (invalid.length > 0) {
this.logger.warn(`Invalid sessions:\n${invalid.map((session) => session.username).join('\n')}`);
}
await delay(1000);
}
private async validateSession(session: Session) {
const errors: string[] = [];
let expires = Date.now();
if (!session) errors.push('Invalid session');
if (session.schemaVersion !== this.configService.getOrThrow<number>('session.schemaVersion')) {
errors.push('Outdated schema version');
}
if (!session.username) errors.push('Invalid username');
| if (!session.password) errors.push('Invalid password'); |
if (!session.steamId) errors.push('Invalid steamId');
if (!session.hasOwnProperty('sharedSecret')) errors.push('Invalid shared Secret');
if (!session.hasOwnProperty('identitySecret')) errors.push('Invalid identity Secret');
if (session.desktopRefreshToken) {
if (!this.steamTokensService.validateRefreshToken(session.desktopRefreshToken)) {
errors.push('Invalid desktop refresh token');
}
const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.desktopRefreshToken);
if (tokenExpiration > expires) expires = tokenExpiration;
}
if (session.mobileRefreshToken) {
if (!this.steamTokensService.validateRefreshToken(session.mobileRefreshToken)) {
errors.push('Invalid mobile refresh token');
}
const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.mobileRefreshToken);
if (tokenExpiration > expires) expires = tokenExpiration;
}
if (session.webRefreshToken) {
if (!this.steamTokensService.validateRefreshToken(session.webRefreshToken)) {
errors.push('Invalid web refresh token');
}
const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.webRefreshToken);
if (tokenExpiration > expires) expires = tokenExpiration;
}
if (expires < Date.now()) errors.push('Expired session');
return { valid: errors.length === 0, errors, expires };
}
}
| src/commands/validate/validate-sessions.service.ts | Sadzurami-steam-sessions-creator-97b7294 | [
{
"filename": "src/modules/sessions-import/sessions-import.service.ts",
"retrieved_chunk": " content = JSON.parse(content);\n if (content == null || typeof content !== 'object' || Array.isArray(content)) {\n throw new Error('Invalid session file');\n }\n const session = Object.fromEntries(\n Object.entries(content).map(([key, value]) => [key[0].toLowerCase() + key.slice(1), value]),\n ) as unknown as Session;\n result.values.push(session);\n } catch (error) {\n result.errors.push(filePath);",
"score": 40.62760314278856
},
{
"filename": "src/modules/accounts-import/accounts-import.service.ts",
"retrieved_chunk": " const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };\n try {\n const { SteamLogin: username, SteamPassword: password } = JSON.parse(fileContent);\n if (!username) throw new Error('Invalid username');\n if (!password) throw new Error('Invalid password');\n const account = new Account(`${username}:${password}`);\n result.values.push(account);\n } catch (error) {\n result.errors.push(fileContent);\n }",
"score": 40.29220394876622
},
{
"filename": "src/modules/accounts-import/accounts-import.service.ts",
"retrieved_chunk": " const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };\n try {\n let content = await this.readFilesQueue.add(() => fs.readFile(filePath, 'utf-8'));\n content = content.trim();\n if (content.length === 0) throw new Error('Empty file');\n // session file\n if (filePath.endsWith('.steamsession')) {\n const readResults = this.readAccountFromSessionFile(content);\n result.values.push(...readResults.values);\n if (readResults.errors.length > 0) result.errors.push(filePath);",
"score": 31.411728134125926
},
{
"filename": "src/modules/accounts-import/accounts-import.service.ts",
"retrieved_chunk": " return result;\n }\n private readAccountFromSessionFile(fileContent: string) {\n const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };\n try {\n const { Username, Password, SharedSecret, IdentitySecret } = JSON.parse(fileContent);\n if (!Username) throw new Error('Invalid username');\n if (!Password) throw new Error('Invalid password');\n const account = new Account(`${Username}:${Password}:${SharedSecret || ''}:${IdentitySecret || ''}`);\n result.values.push(account);",
"score": 30.70209202223257
},
{
"filename": "src/commands/create/create-sessions.service.ts",
"retrieved_chunk": " password: account.password,\n sharedSecret: account.sharedSecret || null,\n identitySecret: account.identitySecret || null,\n steamId,\n webRefreshToken,\n mobileRefreshToken,\n desktopRefreshToken,\n schemaVersion,\n };\n return session;",
"score": 30.679601532608064
}
] | typescript | if (!session.password) errors.push('Invalid password'); |
import BookModel from "../models/BookModel";
import Bucket from "../models/Bucket";
import Token from "../lib/GenerateToken";
import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const";
import { TokStatus, Book } from "../common/types";
import {
sendJsonResponse,
parseSimplePostData,
md5,
uuid,
} from "../common/utils";
import filetype from "file-type-cjs";
import fs from "node:fs";
import EPub from "epub";
import os from "node:os";
import path from "node:path";
import crypto from "node:crypto";
import { exec } from "node:child_process";
import http from "node:http";
/**
 * Extracts the cover image from an EPUB by unzipping it to a temp dir
 * and grepping for conventional cover file names. UNIX-only: relies on
 * the `unzip`, `find` and `grep` binaries.
 * @param epubFilepath path to an .epub file on disk
 * @returns [imageBuffer, imagePath] or null when no cover was found
 */
async function getEpubCoverFromEpubFile_UNIX(
  epubFilepath: string
): Promise<[Buffer, string] | null> {
  let randomString = crypto.randomBytes(16).toString("hex");
  let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`);
  fs.mkdirSync(tempDir);

  // Runs a shell command, resolving with its stdout.
  const run = (cmd: string): Promise<string> =>
    new Promise((resolve, reject) => {
      exec(cmd, (err: any, stdout: any, stderr: any) => {
        // BUG FIX: the original called resolve() right after reject()
        // on failure; reject must be the only settlement on error.
        if (err) reject(err);
        else resolve(stdout);
      });
    });

  try {
    // NOTE(review): epubFilepath is interpolated into a shell command.
    // Current callers only pass internally generated temp paths, but
    // this is shell-injectable for arbitrary input — confirm callers.
    await run(`unzip -q ${epubFilepath} -d ${tempDir}`);

    const findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`;
    const output = await run(findCMD);

    // BUG FIX: grep may match several files; the original kept the
    // newline-joined list and readFileSync then threw. Use the first hit.
    const selectedFilePath = output.trim().split("\n")[0];
    if (!selectedFilePath) return null;

    return [Buffer.from(fs.readFileSync(selectedFilePath)), selectedFilePath];
  } catch (err) {
    console.error(err);
    return null;
  } finally {
    fs.rmSync(tempDir, { recursive: true }); // we r good boys!
  }
}
export default async function (
req: http.IncomingMessage,
res: http.ServerResponse
) {
const BOOK_DB = new BookModel();
const BUCKET = new Bucket();
await BOOK_DB.init();
await BUCKET.init();
try {
if (req.method === "GET") {
try {
let userBooks = await BOOK_DB.getBooks();
userBooks = userBooks.map((e) => {
delete e.path;
return e;
});
sendJsonResponse(res, userBooks, 200);
} catch (error) {
console.error(error);
sendJsonResponse(res, ERROR.internalErr);
}
} else if (req.method === "POST") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
| const token = new Token(); |
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let epubBuffer: Buffer;
epubBuffer = await parseSimplePostData(req);
let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6);
let bufferMime = await filetype.fromBuffer(epubBuffer);
if (bufferMime.mime != "application/epub+zip") {
sendJsonResponse(res, ERROR.invalidMimeForResource, 415);
return;
}
if (epubSizeInMB > MAX_EPUB_SIZE_MB) {
sendJsonResponse(res, ERROR.fileTooLarge, 400);
return;
}
let randomString = crypto.randomBytes(16).toString("hex");
const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`);
fs.writeFileSync(tempEpubFilePath, epubBuffer);
const epub: any = await new Promise((resolve, reject) => {
const epub = new EPub(tempEpubFilePath);
epub.on("end", () => resolve(epub));
epub.on("error", reject);
epub.parse();
});
let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath);
console.log(epubCoverBuffer);
let epubSignature = md5(epubBuffer.toString("hex"));
let foundBook = await BOOK_DB.getBook("", epubSignature);
if (foundBook) {
sendJsonResponse(
res,
{
...ERROR.resourceExists,
data: {
id: foundBook.id,
},
},
409
);
return;
}
let epubFilePermalink = await BUCKET.pushBufferWithName(
epubBuffer,
`${epubSignature}.epub`
);
let epubCoverPermalink = null;
if (epubCoverBuffer) {
epubCoverPermalink = await BUCKET.pushBufferWithName(
epubCoverBuffer[0],
`${epubSignature}.${epubCoverBuffer[1].split(".").pop()}`
);
}
let epubID = uuid();
let epubEntry: Book = {
id: epubID,
userid: parsedAuthToken.id,
title: epub.metadata?.title ?? epubID.split("-").pop(),
author: epub.metadata?.creator ?? parsedAuthToken.email,
path: epubFilePermalink,
signature: epubSignature,
cover: epubCoverPermalink,
};
const pushed = await BOOK_DB.pushBook(epubEntry);
if (!pushed) {
sendJsonResponse(res, ERROR.internalErr, 500);
return;
}
sendJsonResponse(
res,
{
error: null,
message: `successfully published a book of id ${epubEntry.id}`,
data: {
id: epubEntry.id,
},
},
201
);
} else if (req.method === "DELETE") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let body: Buffer;
body = await parseSimplePostData(req);
let data: any;
try {
data = JSON.parse(body.toString());
} catch {
sendJsonResponse(res, ERROR.invalidJSONData, 400);
return;
}
if (!data.bookid) {
sendJsonResponse(res, ERROR.badRequest, 400);
return;
}
let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id);
if (!bookDeleted) {
sendJsonResponse(res, {
error: "unable-to-delete-book",
message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`,
status: 404
}, 404)
return;
}
sendJsonResponse(res, {
error: null,
message: `successfully deleted book of id ${data.bookid}`,
status: 204,
data: {
id: data.bookid,
}
}, 204)
}
} finally {
await BOOK_DB.close();
}
}
| src/routes/Books.ts | Aadv1k-quillia-52c5b34 | [
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " const BOOK_DB = new BookModel();\n const USER_DB = new UserModel();\n const authorization = req.headers?.authorization;\n const authToken = authorization?.split(\" \")?.pop()?.trim();\n try {\n if (req.method === \"OPTIONS\") {\n sendJsonResponse(res, {}, 200);\n return;\n }\n if (!authorization || !authToken) {",
"score": 63.14017832812543
},
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " sendJsonResponse(res, ERROR.unauthorized, 401);\n return;\n }\n const token = new Token();\n const tokenStatus: TokStatus = token.verify(authToken);\n if (\n tokenStatus === TokStatus.INVALID ||\n tokenStatus === TokStatus.INVALID_SIG\n ) {\n sendJsonResponse(res, ERROR.unauthorized, 401);",
"score": 36.91097361806492
},
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " sendJsonResponse(res, ERROR.resourceNotExists, 404);\n } else {\n sendJsonResponse(res, userIssues, 200);\n }\n }\n } else if (req.method === \"POST\") {\n if (req.headers?.[\"content-type\"] != \"application/json\") {\n sendJsonResponse(res, ERROR.invalidMimeForResource, 415);\n return;\n }",
"score": 32.13966841025594
},
{
"filename": "src/routes/Login.ts",
"retrieved_chunk": " }\n if (md5(parsedData.password) !== foundUser.password) {\n sendJsonResponse(res, ERROR.unauthorized, 401);\n return;\n }\n const token = new Token();\n const { password, ...tokenBody} = foundUser;\n let accessToken = token.generate(tokenBody);\n sendJsonResponse(res, {\n messaged: \"found the given user\",",
"score": 24.828924171451927
},
{
"filename": "src/routes/Signup.ts",
"retrieved_chunk": ") {\n const DB = new UserModel();\n if (req.method !== \"POST\") {\n sendJsonResponse(res, ERROR.methodNotAllowed, 405);\n return;\n }\n let data: any = await parseSimplePostData(req);\n data = data.toString();\n let parsedData: User;\n try {",
"score": 22.802201592671853
}
] | typescript | const token = new Token(); |
import http from "node:http";
import { sendJsonResponse, md5, uuid, parseSimplePostData } from "../common/utils";
import { ERROR } from "../common/const";
import { User } from "../common/types";
import UserModel from "../models/UserModel";
import Token from "../lib/GenerateToken";
import isEmailValid from "../lib/isEmailValid";
export default async function (
req: http.IncomingMessage,
res: http.ServerResponse
) {
const DB = new UserModel();
if (req.method !== "POST") {
sendJsonResponse(res, ERROR.methodNotAllowed, 405);
return;
}
let data: any = await parseSimplePostData(req);
data = data.toString();
let parsedData: User;
try {
parsedData = JSON.parse(data === "" ? '{}' : data);
} catch {
sendJsonResponse(res, ERROR.invalidJSONData, 400)
return;
}
if (!parsedData.email || !parsedData.password) {
sendJsonResponse(res, ERROR.badRequest, 400);
return;
}
if (!isEmailValid(parsedData.email)) {
sendJsonResponse(res, ERROR.badRequest, 400);
return;
}
await DB.init();
let foundUser = await DB.getUser(parsedData.email);
if (foundUser) {
sendJsonResponse(res, ERROR.userAlreadyExists, 409)
return;
}
let user: User = {
id: uuid(),
email: parsedData.email,
password: | md5(parsedData.password),
}
const token = new Token(); |
let pushed = await DB.pushUser(user)
const { password, ...tokenBody} = user;
let accessToken = token.generate(tokenBody);
if (pushed !== null) {
sendJsonResponse(res, {
status: 201,
message: "successfully created new user",
error: null,
token: accessToken,
data: {
email: user.email,
id: user.id
}
}, 201)
} else {
sendJsonResponse(res, ERROR.internalErr, 500);
}
await DB.close();
}
| src/routes/Signup.ts | Aadv1k-quillia-52c5b34 | [
{
"filename": "src/routes/Login.ts",
"retrieved_chunk": " }\n if (md5(parsedData.password) !== foundUser.password) {\n sendJsonResponse(res, ERROR.unauthorized, 401);\n return;\n }\n const token = new Token();\n const { password, ...tokenBody} = foundUser;\n let accessToken = token.generate(tokenBody);\n sendJsonResponse(res, {\n messaged: \"found the given user\",",
"score": 33.450616868706135
},
{
"filename": "src/models/UserModel.ts",
"retrieved_chunk": " await this.client.query(`DELETE FROM users WHERE id = $1 OR email = $2`, [user.id, user.email]);\n return user;\n } catch (error) {\n throw error;\n }\n }\n async pushUser(user: User): Promise<User | void> {\n try {\n await this.client.query(\"INSERT INTO users (id, email, password) VALUES ($1, $2, $3)\", [user.id, user.email, user.password]);\n return user;",
"score": 22.586947362230255
},
{
"filename": "src/routes/Login.ts",
"retrieved_chunk": " } catch(error) {\n sendJsonResponse(res, ERROR.invalidJSONData, 400)\n return;\n }\n await DB.init();\n const foundUser: User = await DB.getUser(parsedData.email);\n await DB.close();\n if (!foundUser) {\n sendJsonResponse(res, ERROR.userNotFound, 404);\n return;",
"score": 19.87902317446901
},
{
"filename": "src/routes/Login.ts",
"retrieved_chunk": " const DB = new UserModel();\n let data: any = await parseSimplePostData(req);\n data = data.toString();\n if (req.method !== \"POST\") {\n sendJsonResponse(res, ERROR.methodNotAllowed, 405);\n return;\n }\n let parsedData: User;\n try {\n parsedData = JSON.parse(data);",
"score": 18.828966267321178
},
{
"filename": "src/common/types.ts",
"retrieved_chunk": "export interface User {\n id?: string,\n email: string;\n password: string;\n}\nexport interface Issue {\n id: string,\n lenderid: string,\n borrowerid: string,\n bookid: string",
"score": 15.686673137662186
}
] | typescript | md5(parsedData.password),
}
const token = new Token(); |
import BookModel from "../models/BookModel";
import Bucket from "../models/Bucket";
import Token from "../lib/GenerateToken";
import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const";
import { TokStatus, Book } from "../common/types";
import {
sendJsonResponse,
parseSimplePostData,
md5,
uuid,
} from "../common/utils";
import filetype from "file-type-cjs";
import fs from "node:fs";
import EPub from "epub";
import os from "node:os";
import path from "node:path";
import crypto from "node:crypto";
import { exec } from "node:child_process";
import http from "node:http";
/**
 * Extracts the cover image from an EPUB by unzipping it to a temp dir
 * and grepping for conventional cover file names. UNIX-only: relies on
 * the `unzip`, `find` and `grep` binaries.
 * @param epubFilepath path to an .epub file on disk
 * @returns [imageBuffer, imagePath] or null when no cover was found
 */
async function getEpubCoverFromEpubFile_UNIX(
  epubFilepath: string
): Promise<[Buffer, string] | null> {
  let randomString = crypto.randomBytes(16).toString("hex");
  let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`);
  fs.mkdirSync(tempDir);

  // Runs a shell command, resolving with its stdout.
  const run = (cmd: string): Promise<string> =>
    new Promise((resolve, reject) => {
      exec(cmd, (err: any, stdout: any, stderr: any) => {
        // BUG FIX: the original called resolve() right after reject()
        // on failure; reject must be the only settlement on error.
        if (err) reject(err);
        else resolve(stdout);
      });
    });

  try {
    // NOTE(review): epubFilepath is interpolated into a shell command.
    // Current callers only pass internally generated temp paths, but
    // this is shell-injectable for arbitrary input — confirm callers.
    await run(`unzip -q ${epubFilepath} -d ${tempDir}`);

    const findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`;
    const output = await run(findCMD);

    // BUG FIX: grep may match several files; the original kept the
    // newline-joined list and readFileSync then threw. Use the first hit.
    const selectedFilePath = output.trim().split("\n")[0];
    if (!selectedFilePath) return null;

    return [Buffer.from(fs.readFileSync(selectedFilePath)), selectedFilePath];
  } catch (err) {
    console.error(err);
    return null;
  } finally {
    fs.rmSync(tempDir, { recursive: true }); // we r good boys!
  }
}
export default async function (
req: http.IncomingMessage,
res: http.ServerResponse
) {
const BOOK_DB = new BookModel();
const BUCKET = new Bucket();
await BOOK_DB.init();
await BUCKET.init();
try {
if (req.method === "GET") {
try {
let userBooks = await BOOK_DB.getBooks();
userBooks = userBooks.map((e) => {
delete e.path;
return e;
});
sendJsonResponse(res, userBooks, 200);
} catch (error) {
console.error(error);
sendJsonResponse(res, ERROR.internalErr);
}
} else if (req.method === "POST") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
| const tokenStatus: TokStatus = token.verify(authToken); |
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let epubBuffer: Buffer;
epubBuffer = await parseSimplePostData(req);
let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6);
let bufferMime = await filetype.fromBuffer(epubBuffer);
if (bufferMime.mime != "application/epub+zip") {
sendJsonResponse(res, ERROR.invalidMimeForResource, 415);
return;
}
if (epubSizeInMB > MAX_EPUB_SIZE_MB) {
sendJsonResponse(res, ERROR.fileTooLarge, 400);
return;
}
let randomString = crypto.randomBytes(16).toString("hex");
const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`);
fs.writeFileSync(tempEpubFilePath, epubBuffer);
const epub: any = await new Promise((resolve, reject) => {
const epub = new EPub(tempEpubFilePath);
epub.on("end", () => resolve(epub));
epub.on("error", reject);
epub.parse();
});
let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath);
console.log(epubCoverBuffer);
let epubSignature = md5(epubBuffer.toString("hex"));
let foundBook = await BOOK_DB.getBook("", epubSignature);
if (foundBook) {
sendJsonResponse(
res,
{
...ERROR.resourceExists,
data: {
id: foundBook.id,
},
},
409
);
return;
}
let epubFilePermalink = await BUCKET.pushBufferWithName(
epubBuffer,
`${epubSignature}.epub`
);
let epubCoverPermalink = null;
if (epubCoverBuffer) {
epubCoverPermalink = await BUCKET.pushBufferWithName(
epubCoverBuffer[0],
`${epubSignature}.${epubCoverBuffer[1].split(".").pop()}`
);
}
let epubID = uuid();
let epubEntry: Book = {
id: epubID,
userid: parsedAuthToken.id,
title: epub.metadata?.title ?? epubID.split("-").pop(),
author: epub.metadata?.creator ?? parsedAuthToken.email,
path: epubFilePermalink,
signature: epubSignature,
cover: epubCoverPermalink,
};
const pushed = await BOOK_DB.pushBook(epubEntry);
if (!pushed) {
sendJsonResponse(res, ERROR.internalErr, 500);
return;
}
sendJsonResponse(
res,
{
error: null,
message: `successfully published a book of id ${epubEntry.id}`,
data: {
id: epubEntry.id,
},
},
201
);
} else if (req.method === "DELETE") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let body: Buffer;
body = await parseSimplePostData(req);
let data: any;
try {
data = JSON.parse(body.toString());
} catch {
sendJsonResponse(res, ERROR.invalidJSONData, 400);
return;
}
if (!data.bookid) {
sendJsonResponse(res, ERROR.badRequest, 400);
return;
}
let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id);
if (!bookDeleted) {
sendJsonResponse(res, {
error: "unable-to-delete-book",
message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`,
status: 404
}, 404)
return;
}
sendJsonResponse(res, {
error: null,
message: `successfully deleted book of id ${data.bookid}`,
status: 204,
data: {
id: data.bookid,
}
}, 204)
}
} finally {
await BOOK_DB.close();
}
}
| src/routes/Books.ts | Aadv1k-quillia-52c5b34 | [
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " const BOOK_DB = new BookModel();\n const USER_DB = new UserModel();\n const authorization = req.headers?.authorization;\n const authToken = authorization?.split(\" \")?.pop()?.trim();\n try {\n if (req.method === \"OPTIONS\") {\n sendJsonResponse(res, {}, 200);\n return;\n }\n if (!authorization || !authToken) {",
"score": 66.6645596609734
},
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " sendJsonResponse(res, ERROR.unauthorized, 401);\n return;\n }\n const token = new Token();\n const tokenStatus: TokStatus = token.verify(authToken);\n if (\n tokenStatus === TokStatus.INVALID ||\n tokenStatus === TokStatus.INVALID_SIG\n ) {\n sendJsonResponse(res, ERROR.unauthorized, 401);",
"score": 54.32750060029924
},
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " return;\n }\n await ISSUE_DB.init();\n await BOOK_DB.init();\n await USER_DB.init();\n const parsedAuthToken: any = token.UNSAFE_parse(authToken);\n if (req.method === \"GET\") {\n let URLParams = req.url.split(\"/\").slice(3);\n let requestedBook = URLParams?.[0];\n if (requestedBook) {",
"score": 27.40132074054378
},
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " sendJsonResponse(res, ERROR.resourceNotExists, 404);\n } else {\n sendJsonResponse(res, userIssues, 200);\n }\n }\n } else if (req.method === \"POST\") {\n if (req.headers?.[\"content-type\"] != \"application/json\") {\n sendJsonResponse(res, ERROR.invalidMimeForResource, 415);\n return;\n }",
"score": 26.648980094979883
},
{
"filename": "src/routes/Login.ts",
"retrieved_chunk": " }\n if (md5(parsedData.password) !== foundUser.password) {\n sendJsonResponse(res, ERROR.unauthorized, 401);\n return;\n }\n const token = new Token();\n const { password, ...tokenBody} = foundUser;\n let accessToken = token.generate(tokenBody);\n sendJsonResponse(res, {\n messaged: \"found the given user\",",
"score": 24.237148474245252
}
] | typescript | const tokenStatus: TokStatus = token.verify(authToken); |
import IssueModel from "../models/IssueModel";
import BookModel from "../models/BookModel";
import UserModel from "../models/UserModel";
import Token from "../lib/GenerateToken";
import { ERROR } from "../common/const";
import { TokStatus, Issue } from "../common/types";
import {
sendJsonResponse,
sendEpubResponse,
parseSimplePostData,
uuid,
getBufferFromRawURL,
} from "../common/utils";
import http from "node:http";
import https from "node:https";
export default async function (
req: http.IncomingMessage,
res: http.ServerResponse
) {
const ISSUE_DB = new IssueModel();
const BOOK_DB = new BookModel();
const USER_DB = new UserModel();
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop()?.trim();
try {
if (req.method === "OPTIONS") {
sendJsonResponse(res, {}, 200);
return;
}
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
await ISSUE_DB.init();
await BOOK_DB.init();
await USER_DB.init();
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
if (req.method === "GET") {
let URLParams = req.url.split("/").slice(3);
let requestedBook = URLParams?.[0];
if (requestedBook) {
let targetBook = await BOOK_DB.getBook(requestedBook);
if (!targetBook) {
sendJsonResponse(res, ERROR.resourceNotExists, 404);
return;
}
let epubResourcePath = targetBook.path;
const response: Array<Buffer> = await new Promise((resolve, reject) => {
https.get(epubResourcePath, (res) => {
let data: Array<Buffer> = [];
res.on("data", (d: Buffer) => data.push(d));
res.on("end", () => resolve(data));
res.on("error", (error) => reject(error));
});
});
let epubBuffer = Buffer.concat(response);
sendEpubResponse(res, epubBuffer);
return;
} else {
let userIssues = await ISSUE_DB.getIssues(parsedAuthToken.id);
if (!userIssues) {
sendJsonResponse(res, ERROR.resourceNotExists, 404);
} else {
sendJsonResponse(res, userIssues, 200);
}
}
} else if (req.method === "POST") {
if (req.headers?.["content-type"] != "application/json") {
sendJsonResponse(res, ERROR.invalidMimeForResource, 415);
return;
}
| let issueData: Issue; |
try {
let issuePostData = await parseSimplePostData(req);
issueData = JSON.parse(issuePostData.toString());
} catch (error) {
console.error(error);
sendJsonResponse(res, ERROR.badRequest, 400);
return;
}
if (!issueData.lenderid || !issueData.bookid) {
sendJsonResponse(res, ERROR.badRequest, 400);
return;
}
let foundLender = await USER_DB.getUserByID(issueData.lenderid);
let foundBook = await BOOK_DB.getBook(issueData.bookid);
if (!foundLender || !foundBook) {
sendJsonResponse(res, ERROR.resourceNotExists, 404);
return;
}
let foundIssue = await ISSUE_DB.getIssue(
foundLender.id,
foundBook.id,
parsedAuthToken.id
);
if (foundIssue) {
sendJsonResponse(
res,
{
...ERROR.resourceExists,
data: {
id: foundIssue.id,
bookid: foundIssue.bookid,
},
},
409
);
return;
}
let issueid = uuid();
let issueEntry: Issue = {
id: issueid,
borrowerid: parsedAuthToken.id,
lenderid: foundLender.id,
bookid: foundBook.id,
};
const pushed = await ISSUE_DB.pushIssue(issueEntry);
if (!pushed) {
sendJsonResponse(res, ERROR.internalErr, 500);
return;
}
sendJsonResponse(
res,
{
error: null,
message: `successfully created a new issue of id ${issueEntry.id}`,
data: {
id: pushed.id,
borrower: pushed.borrowerid,
lender: pushed.lenderid,
book: foundBook.title,
},
},
201
);
}
} finally {
await ISSUE_DB.close();
await BOOK_DB.close();
await USER_DB.close();
}
}
| src/routes/Issue.ts | Aadv1k-quillia-52c5b34 | [
{
"filename": "src/routes/Books.ts",
"retrieved_chunk": " console.error(error);\n sendJsonResponse(res, ERROR.internalErr);\n }\n } else if (req.method === \"POST\") {\n const authorization = req.headers?.authorization;\n const authToken = authorization?.split(\" \")?.pop();\n if (!authorization || !authToken) {\n sendJsonResponse(res, ERROR.unauthorized, 401);\n return;\n }",
"score": 30.400123516765746
},
{
"filename": "src/routes/Books.ts",
"retrieved_chunk": " id: epubEntry.id,\n },\n },\n 201\n );\n } else if (req.method === \"DELETE\") {\n const authorization = req.headers?.authorization;\n const authToken = authorization?.split(\" \")?.pop();\n if (!authorization || !authToken) {\n sendJsonResponse(res, ERROR.unauthorized, 401);",
"score": 26.284650290854817
},
{
"filename": "src/routes/Books.ts",
"retrieved_chunk": " let epubBuffer: Buffer;\n epubBuffer = await parseSimplePostData(req);\n let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6);\n let bufferMime = await filetype.fromBuffer(epubBuffer);\n if (bufferMime.mime != \"application/epub+zip\") {\n sendJsonResponse(res, ERROR.invalidMimeForResource, 415);\n return;\n }\n if (epubSizeInMB > MAX_EPUB_SIZE_MB) {\n sendJsonResponse(res, ERROR.fileTooLarge, 400);",
"score": 23.104517394904967
},
{
"filename": "src/server.ts",
"retrieved_chunk": " } else if (url ===\"/api/signup\") {\n await RouteSignup(req, res);\n } else if (url ===\"/api/login\") {\n await RouteLogin(req, res);\n } else if (url.match(/^\\/api\\/books/)) {\n await RouteBooks(req, res);\n } else if (url.match(/^\\/api\\/issue/)) {\n await RouteIssue(req, res);\n } else {\n sendPublicFile(res, url);",
"score": 22.149089396933313
},
{
"filename": "src/routes/Signup.ts",
"retrieved_chunk": ") {\n const DB = new UserModel();\n if (req.method !== \"POST\") {\n sendJsonResponse(res, ERROR.methodNotAllowed, 405);\n return;\n }\n let data: any = await parseSimplePostData(req);\n data = data.toString();\n let parsedData: User;\n try {",
"score": 19.94307985026639
}
] | typescript | let issueData: Issue; |
import { setTimeout as delay } from 'timers/promises';
import { Injectable, Logger } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { Session } from '../../interfaces/session.interface';
import { SteamTokensService } from '../../modules/steam-tokens/steam-tokens.service';
@Injectable()
export class ValidateSessionsService {
private readonly logger = new Logger(ValidateSessionsService.name);
constructor(private readonly steamTokensService: SteamTokensService, private readonly configService: ConfigService) {}
public async validateSessions(sessions: Session[]) {
const valid: Session[] = [];
const invalid: Session[] = [];
for (const session of sessions) {
const { valid: isValid, errors, expires } = await this.validateSession(session);
if (isValid) {
valid.push(session);
this.logger.log(
`Valid: ${session.username}, days: ${Math.floor((expires - Date.now()) / (24 * 60 * 60 * 1000))}`,
);
} else {
invalid.push(session);
this.logger.warn(`Invalid: ${session.username}, errors: ${errors.join(', ')}`);
}
}
if (invalid.length > 0) {
this.logger.warn(`Invalid sessions:\n${invalid.map((session) => session.username).join('\n')}`);
}
await delay(1000);
}
private async validateSession(session: Session) {
const errors: string[] = [];
let expires = Date.now();
if (!session) errors.push('Invalid session');
if (session.schemaVersion !== this.configService.getOrThrow<number>('session.schemaVersion')) {
errors.push('Outdated schema version');
}
if (!session.username) errors.push('Invalid username');
if (!session.password) errors.push('Invalid password');
if (!session.steamId) errors.push('Invalid steamId');
if (!session.hasOwnProperty('sharedSecret')) errors.push('Invalid shared Secret');
if (!session.hasOwnProperty('identitySecret')) errors.push('Invalid identity Secret');
if (session.desktopRefreshToken) {
| if (!this.steamTokensService.validateRefreshToken(session.desktopRefreshToken)) { |
errors.push('Invalid desktop refresh token');
}
const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.desktopRefreshToken);
if (tokenExpiration > expires) expires = tokenExpiration;
}
if (session.mobileRefreshToken) {
if (!this.steamTokensService.validateRefreshToken(session.mobileRefreshToken)) {
errors.push('Invalid mobile refresh token');
}
const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.mobileRefreshToken);
if (tokenExpiration > expires) expires = tokenExpiration;
}
if (session.webRefreshToken) {
if (!this.steamTokensService.validateRefreshToken(session.webRefreshToken)) {
errors.push('Invalid web refresh token');
}
const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.webRefreshToken);
if (tokenExpiration > expires) expires = tokenExpiration;
}
if (expires < Date.now()) errors.push('Expired session');
return { valid: errors.length === 0, errors, expires };
}
}
| src/commands/validate/validate-sessions.service.ts | Sadzurami-steam-sessions-creator-97b7294 | [
{
"filename": "src/commands/create/create-sessions.service.ts",
"retrieved_chunk": " password: account.password,\n sharedSecret: account.sharedSecret || null,\n identitySecret: account.identitySecret || null,\n steamId,\n webRefreshToken,\n mobileRefreshToken,\n desktopRefreshToken,\n schemaVersion,\n };\n return session;",
"score": 61.301979419105486
},
{
"filename": "src/modules/sessions-import/sessions-import.service.ts",
"retrieved_chunk": " content = JSON.parse(content);\n if (content == null || typeof content !== 'object' || Array.isArray(content)) {\n throw new Error('Invalid session file');\n }\n const session = Object.fromEntries(\n Object.entries(content).map(([key, value]) => [key[0].toLowerCase() + key.slice(1), value]),\n ) as unknown as Session;\n result.values.push(session);\n } catch (error) {\n result.errors.push(filePath);",
"score": 53.61049044186747
},
{
"filename": "src/modules/accounts-import/accounts-import.service.ts",
"retrieved_chunk": " const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };\n try {\n const { SteamLogin: username, SteamPassword: password } = JSON.parse(fileContent);\n if (!username) throw new Error('Invalid username');\n if (!password) throw new Error('Invalid password');\n const account = new Account(`${username}:${password}`);\n result.values.push(account);\n } catch (error) {\n result.errors.push(fileContent);\n }",
"score": 52.355164052620246
},
{
"filename": "src/interfaces/session.interface.ts",
"retrieved_chunk": "export interface Session {\n username: string;\n password: string;\n steamId: string;\n webRefreshToken: string;\n mobileRefreshToken: string;\n desktopRefreshToken: string;\n sharedSecret: string | null;\n identitySecret: string | null;\n schemaVersion: number;",
"score": 40.36561487867683
},
{
"filename": "src/modules/accounts-import/accounts-import.service.ts",
"retrieved_chunk": " return result;\n }\n private readAccountFromSessionFile(fileContent: string) {\n const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };\n try {\n const { Username, Password, SharedSecret, IdentitySecret } = JSON.parse(fileContent);\n if (!Username) throw new Error('Invalid username');\n if (!Password) throw new Error('Invalid password');\n const account = new Account(`${Username}:${Password}:${SharedSecret || ''}:${IdentitySecret || ''}`);\n result.values.push(account);",
"score": 40.23430237031837
}
] | typescript | if (!this.steamTokensService.validateRefreshToken(session.desktopRefreshToken)) { |
import fs from 'fs/promises';
import inquirer from 'inquirer';
import pQueue from 'p-queue';
import { setTimeout as delay } from 'timers/promises';
import { Injectable, Logger } from '@nestjs/common';
import { Account as IAccount } from '../../interfaces/account.interface';
import { Secrets } from '../../interfaces/secrets.interface';
/**
 * Parses an account from a `username:password[:sharedSecret[:identitySecret]]`
 * string. Whitespace around the whole string and each part is trimmed;
 * missing secrets stay null.
 *
 * @throws Error when the string is empty, has fewer than two parts, or has a
 *         blank username/password.
 */
class Account implements IAccount {
  public readonly username: string;
  public readonly password: string;

  public sharedSecret: string | null = null;
  public identitySecret: string | null = null;

  constructor(account: string) {
    account = account.trim();
    if (account.length === 0) throw new Error('Invalid account');

    const parts = account.split(':').map((part) => part.trim());
    if (parts.length < 2) throw new Error('Invalid account');

    const [username, password, sharedSecret, identitySecret] = parts;

    // Reject blank credentials (e.g. "user:" or ":pass"); previously such
    // strings produced an Account with an empty username or password, while
    // the file readers in this module throw for the same condition.
    if (!username || !password) throw new Error('Invalid account');

    this.username = username;
    this.password = password;

    if (sharedSecret) this.sharedSecret = sharedSecret;
    if (identitySecret) this.identitySecret = identitySecret;
  }
}
@Injectable()
export class AccountsImportService {
private readonly logger = new Logger(AccountsImportService.name);
private readonly readFilesQueue = new pQueue({ concurrency: 100 });
public async loadAccounts(input: string[] | string) {
if (!input) return [];
if (!Array.isArray(input)) input = [input];
if (input.length === 0) return [];
let accounts: Account[] = [];
const errors: string[] = [];
const readResults = await Promise.all(input.map((input) => this.readAccountsFromInput(input)));
for (const result of readResults) {
accounts.push(...result.values);
errors.push(...result.errors);
}
accounts = this.removeDuplicates(accounts);
if (errors.length > 0 && accounts.length > 0) {
this.logger.warn(`The following account sources are invalid:\n${errors.join('\n')}`);
await delay(1000);
const { confirm } = await inquirer.prompt({
type: 'confirm',
name: 'confirm',
message: 'Continue with the valid accounts?',
default: false,
});
if (!confirm) throw new Error('Aborted by user');
}
return accounts;
}
public assignSecretsToAccounts(accounts: Account[], secrets: Secrets[]) {
const secretsMap = new Map<string, Secrets>();
for (const secret of secrets) {
| secretsMap.set(secret.username, secret); |
// some existing steam-oriented apps are case-insensitive to usernames in secrets
secretsMap.set(secret.username.toLowerCase(), secret);
}
for (const account of accounts) {
let secret = secretsMap.get(account.username);
if (!secret) secret = secretsMap.get(account.username.toLowerCase());
if (!secret) continue;
account.sharedSecret = secret.sharedSecret;
account.identitySecret = secret.identitySecret;
}
}
private removeDuplicates(accounts: Account[]) {
const map = new Map<string, Account>();
for (const account of accounts) map.set(account.username, account);
return [...map.values()];
}
private async readAccountsFromInput(input: string) {
const inputType = await this.inferInputType(input);
if (inputType === 'file') return this.readAccountsFromFile(input);
if (inputType === 'string') return this.readAccountFromString(input);
if (inputType === 'directory') return { values: [], errors: [input] };
}
private async readAccountsFromFile(filePath: string) {
const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };
try {
let content = await this.readFilesQueue.add(() => fs.readFile(filePath, 'utf-8'));
content = content.trim();
if (content.length === 0) throw new Error('Empty file');
// session file
if (filePath.endsWith('.steamsession')) {
const readResults = this.readAccountFromSessionFile(content);
result.values.push(...readResults.values);
if (readResults.errors.length > 0) result.errors.push(filePath);
return result;
}
// asf json
if (filePath.endsWith('.json') && content.includes('"SteamLogin"')) {
const readResults = this.readAccountFromAsfJson(content);
result.values.push(...readResults.values);
if (readResults.errors.length > 0) result.errors.push(filePath);
return result;
}
// plain text
if (content.includes(':')) {
const lines = content
.split(/\s+|\r?\n/)
.map((l) => l.trim())
.filter((l) => l.length > 0);
if (lines.length === 0) throw new Error('Empty file');
for (const line of lines) {
const readResults = this.readAccountFromString(line);
result.values.push(...readResults.values);
if (readResults.errors.length > 0) result.errors.push(line);
}
return result;
}
throw new Error('Unsupported file format');
} catch (error) {
result.errors.push(filePath);
}
return result;
}
private readAccountFromString(str: string) {
const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };
try {
const account = new Account(str);
result.values.push(account);
} catch (error) {
result.errors.push(str);
}
return result;
}
private readAccountFromAsfJson(fileContent: string) {
const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };
try {
const { SteamLogin: username, SteamPassword: password } = JSON.parse(fileContent);
if (!username) throw new Error('Invalid username');
if (!password) throw new Error('Invalid password');
const account = new Account(`${username}:${password}`);
result.values.push(account);
} catch (error) {
result.errors.push(fileContent);
}
return result;
}
private readAccountFromSessionFile(fileContent: string) {
const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };
try {
const { Username, Password, SharedSecret, IdentitySecret } = JSON.parse(fileContent);
if (!Username) throw new Error('Invalid username');
if (!Password) throw new Error('Invalid password');
const account = new Account(`${Username}:${Password}:${SharedSecret || ''}:${IdentitySecret || ''}`);
result.values.push(account);
} catch (error) {
result.errors.push(fileContent);
}
return result;
}
private async inferInputType(input: string) {
if (typeof input !== 'string') throw new Error(`Invalid input type: '${typeof input}'`);
try {
const stats = await fs.stat(input);
if (stats.isFile()) return 'file';
if (stats.isDirectory()) return 'directory';
} catch (error) {
return 'string';
}
}
}
| src/modules/accounts-import/accounts-import.service.ts | Sadzurami-steam-sessions-creator-97b7294 | [
{
"filename": "src/modules/secrets-import/secrets-import.service.ts",
"retrieved_chunk": " default: false,\n });\n if (!confirm) throw new Error('Aborted by user');\n }\n return secrets;\n }\n private removeDuplicates(secrets: Secrets[]) {\n const map = new Map<string, Secrets>();\n for (const secret of secrets) map.set(secret.username, secret);\n return [...map.values()];",
"score": 68.54405924283843
},
{
"filename": "src/modules/sessions-import/sessions-import.service.ts",
"retrieved_chunk": " });\n if (!confirm) throw new Error('Aborted by user');\n }\n return sessions;\n }\n private removeDuplicates(sessions: Session[]) {\n const map = new Map<string, Session>();\n for (const session of sessions) map.set(session.username, session);\n return [...map.values()];\n }",
"score": 32.98251921248723
},
{
"filename": "src/modules/proxies-import/proxies-import.service.ts",
"retrieved_chunk": " if (!confirm) throw new Error('Aborted by user');\n }\n return proxies;\n }\n private removeDuplicates(proxies: Proxy[]) {\n const map = new Map<string, Proxy>();\n for (const proxy of proxies) map.set(proxy.toString(), proxy);\n return [...map.values()];\n }\n private async readProxyFromInput(input: string) {",
"score": 29.61985811614349
},
{
"filename": "src/modules/secrets-import/secrets-import.service.ts",
"retrieved_chunk": " public readonly identitySecret: string;\n constructor(secrets: string) {\n let parsedSecrets: any;\n try {\n parsedSecrets = JSON.parse(secrets);\n } catch (error) {}\n if (typeof parsedSecrets !== 'object' || parsedSecrets === null) throw new Error('Secrets string is invalid');\n const { shared_secret, identity_secret, account_name } = parsedSecrets;\n if (!shared_secret) throw new Error('Shared secret is missing');\n this.sharedSecret = shared_secret;",
"score": 26.100393219146163
},
{
"filename": "src/modules/secrets-import/secrets-import.service.ts",
"retrieved_chunk": " }\n secrets = this.removeDuplicates(secrets);\n if (errors.length > 0) {\n this.logger.warn(`The following secret sources are invalid:\\n${errors.join('\\n')}`);\n await delay(1000);\n const { confirm } = await inquirer.prompt({\n type: 'confirm',\n name: 'confirm',\n message:\n secrets.length > 0 ? `Continue with ${secrets.length} valid secrets?` : 'Continue without any secrets?',",
"score": 21.565443619849397
}
] | typescript | secretsMap.set(secret.username, secret); |
import { Client } from "pg";
import { DB as DBConfig } from "../common/const";
import { Book } from "../common/types";
export default class BookModel {
private readonly client: Client;
constructor() {
this.client = new Client({
host: DBConfig.HOST,
user: DBConfig.USER,
password: DBConfig.PASSWORD,
database: DBConfig.DB_NAME,
port: DBConfig.PORT,
ssl: true
})
}
async init(): Promise<void> {
try {
await this.client.connect();
await this.client.query(`CREATE TABLE IF NOT EXISTS books (
id VARCHAR(255) UNIQUE NOT NULL,
userid VARCHAR(255) UNIQUE NOT NULL,
title VARCHAR(255) NOT NULL,
author VARCHAR(255) NOT NULL,
signature VARCHAR(255) NOT NULL,
path VARCHAR(255) NOT NULL,
cover VARCHAR(255) NOT NULL
)
`);
} catch (error) {
throw error;
}
}
async bookExists(bookid: string): Promise<boolean> {
const result = await this.client.query("SELECT EXISTS (SELECT 1 FROM books WHERE id = $1)", [bookid])
return result.rows[0].exists
}
async getBooks(): Promise<Array<Book> | null> {
try {
let response = await this.client.query("SELECT * FROM books");
return response.rows;
} catch (error) {
console.error(error);
return;
}
}
async pushBook(book: Book): Promise<Book | null> {
try {
await this.client.query(`
INSERT INTO books (id, userid, author, title, path, cover, signature)
VALUES ($1, $2, $3, $4, $5, $6, $7)`,
[book.id, book.userid, | book.author, book.title, book.path, book?.cover ?? "", book.signature]
)
return book; |
} catch (error) {
console.error(error);
return null;
}
}
async deleteBook(bookid: string, userid?: string) {
try {
await this.client.query(`DELETE FROM books WHERE id = $1 ${userid && "AND userid = $2"}`, [bookid, userid ?? ""]);
return bookid;
} catch (error) {
console.error(error);
return null;
}
}
async getBook(bookid: string, sig?: string): Promise<Book | null> {
try {
const response = await this.client.query(`SELECT * FROM books WHERE ${bookid ? "id = $1" : "signature = $1"}`, [bookid || sig]);
return response.rows[0];
} catch (error) {
console.error(error);
return null;
}
}
async updateBook() { /* TODO */ }
async close(): Promise<void> {
await this.client.end();
}
}
| src/models/BookModel.ts | Aadv1k-quillia-52c5b34 | [
{
"filename": "src/common/types.ts",
"retrieved_chunk": "}\nexport interface Book {\n id: string,\n userid: string,\n title: string,\n author: string,\n path: string,\n signature: string\n cover?: string,\n}",
"score": 58.32371995230327
},
{
"filename": "src/routes/Books.ts",
"retrieved_chunk": " let epubEntry: Book = {\n id: epubID,\n userid: parsedAuthToken.id,\n title: epub.metadata?.title ?? epubID.split(\"-\").pop(),\n author: epub.metadata?.creator ?? parsedAuthToken.email,\n path: epubFilePermalink,\n signature: epubSignature,\n cover: epubCoverPermalink,\n };\n const pushed = await BOOK_DB.pushBook(epubEntry);",
"score": 53.25527503889214
},
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " sendJsonResponse(\n res,\n {\n error: null,\n message: `successfully created a new issue of id ${issueEntry.id}`,\n data: {\n id: pushed.id,\n borrower: pushed.borrowerid,\n lender: pushed.lenderid,\n book: foundBook.title,",
"score": 42.24740677473597
},
{
"filename": "src/routes/Books.ts",
"retrieved_chunk": " }\n if (!data.bookid) {\n sendJsonResponse(res, ERROR.badRequest, 400);\n return;\n }\n let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id);\n if (!bookDeleted) {\n sendJsonResponse(res, {\n error: \"unable-to-delete-book\",\n message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`,",
"score": 39.99559549621636
},
{
"filename": "src/routes/Books.ts",
"retrieved_chunk": " if (!pushed) {\n sendJsonResponse(res, ERROR.internalErr, 500);\n return;\n }\n sendJsonResponse(\n res,\n {\n error: null,\n message: `successfully published a book of id ${epubEntry.id}`,\n data: {",
"score": 37.09073436068709
}
] | typescript | book.author, book.title, book.path, book?.cover ?? "", book.signature]
)
return book; |
import { setTimeout as delay } from 'timers/promises';
import { Injectable, Logger } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { Session } from '../../interfaces/session.interface';
import { SteamTokensService } from '../../modules/steam-tokens/steam-tokens.service';
@Injectable()
export class ValidateSessionsService {
private readonly logger = new Logger(ValidateSessionsService.name);
constructor(private readonly steamTokensService: SteamTokensService, private readonly configService: ConfigService) {}
public async validateSessions(sessions: Session[]) {
const valid: Session[] = [];
const invalid: Session[] = [];
for (const session of sessions) {
const { valid: isValid, errors, expires } = await this.validateSession(session);
if (isValid) {
valid.push(session);
this.logger.log(
`Valid: ${session.username}, days: ${Math.floor((expires - Date.now()) / (24 * 60 * 60 * 1000))}`,
);
} else {
invalid.push(session);
this.logger.warn(`Invalid: ${session.username}, errors: ${errors.join(', ')}`);
}
}
if (invalid.length > 0) {
this.logger.warn(`Invalid sessions:\n${invalid.map((session) => session.username).join('\n')}`);
}
await delay(1000);
}
private async validateSession(session: Session) {
const errors: string[] = [];
let expires = Date.now();
if (!session) errors.push('Invalid session');
if (session.schemaVersion !== this.configService.getOrThrow<number>('session.schemaVersion')) {
errors.push('Outdated schema version');
}
if (!session.username) errors.push('Invalid username');
if (!session.password) errors.push('Invalid password');
if (!session.steamId) errors.push('Invalid steamId');
if (!session.hasOwnProperty('sharedSecret')) errors.push('Invalid shared Secret');
if (!session.hasOwnProperty('identitySecret')) errors.push('Invalid identity Secret');
if (session.desktopRefreshToken) {
if (!this.steamTokensService.validateRefreshToken(session.desktopRefreshToken)) {
errors.push('Invalid desktop refresh token');
}
const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.desktopRefreshToken);
if (tokenExpiration > expires) expires = tokenExpiration;
}
| if (session.mobileRefreshToken) { |
if (!this.steamTokensService.validateRefreshToken(session.mobileRefreshToken)) {
errors.push('Invalid mobile refresh token');
}
const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.mobileRefreshToken);
if (tokenExpiration > expires) expires = tokenExpiration;
}
if (session.webRefreshToken) {
if (!this.steamTokensService.validateRefreshToken(session.webRefreshToken)) {
errors.push('Invalid web refresh token');
}
const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.webRefreshToken);
if (tokenExpiration > expires) expires = tokenExpiration;
}
if (expires < Date.now()) errors.push('Expired session');
return { valid: errors.length === 0, errors, expires };
}
}
| src/commands/validate/validate-sessions.service.ts | Sadzurami-steam-sessions-creator-97b7294 | [
{
"filename": "src/commands/create/create-sessions.service.ts",
"retrieved_chunk": " password: account.password,\n sharedSecret: account.sharedSecret || null,\n identitySecret: account.identitySecret || null,\n steamId,\n webRefreshToken,\n mobileRefreshToken,\n desktopRefreshToken,\n schemaVersion,\n };\n return session;",
"score": 41.133272257360694
},
{
"filename": "src/modules/sessions-import/sessions-import.service.ts",
"retrieved_chunk": " content = JSON.parse(content);\n if (content == null || typeof content !== 'object' || Array.isArray(content)) {\n throw new Error('Invalid session file');\n }\n const session = Object.fromEntries(\n Object.entries(content).map(([key, value]) => [key[0].toLowerCase() + key.slice(1), value]),\n ) as unknown as Session;\n result.values.push(session);\n } catch (error) {\n result.errors.push(filePath);",
"score": 32.24820303141104
},
{
"filename": "src/commands/create/create-sessions.service.ts",
"retrieved_chunk": " const desktopRefreshToken = await this.createRefreshToken(account, 'desktop');\n await delay(delayMs);\n const webRefreshToken = await this.createRefreshToken(account, 'web');\n await delay(delayMs);\n const mobileRefreshToken = await this.createRefreshToken(account, 'mobile');\n await delay(delayMs);\n const steamId = this.getSteamIdFromRefreshToken(webRefreshToken);\n const schemaVersion = this.schemaVersion;\n const session: ISession = {\n username: account.username,",
"score": 27.28138559788901
},
{
"filename": "src/interfaces/session.interface.ts",
"retrieved_chunk": "export interface Session {\n username: string;\n password: string;\n steamId: string;\n webRefreshToken: string;\n mobileRefreshToken: string;\n desktopRefreshToken: string;\n sharedSecret: string | null;\n identitySecret: string | null;\n schemaVersion: number;",
"score": 23.31061800609816
},
{
"filename": "src/modules/accounts-import/accounts-import.service.ts",
"retrieved_chunk": " const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };\n try {\n let content = await this.readFilesQueue.add(() => fs.readFile(filePath, 'utf-8'));\n content = content.trim();\n if (content.length === 0) throw new Error('Empty file');\n // session file\n if (filePath.endsWith('.steamsession')) {\n const readResults = this.readAccountFromSessionFile(content);\n result.values.push(...readResults.values);\n if (readResults.errors.length > 0) result.errors.push(filePath);",
"score": 22.711118005812892
}
] | typescript | if (session.mobileRefreshToken) { |
import { setTimeout as delay } from 'timers/promises';
import { Injectable, Logger } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { Session } from '../../interfaces/session.interface';
import { SteamTokensService } from '../../modules/steam-tokens/steam-tokens.service';
@Injectable()
export class ValidateSessionsService {
private readonly logger = new Logger(ValidateSessionsService.name);
constructor(private readonly steamTokensService: SteamTokensService, private readonly configService: ConfigService) {}
public async validateSessions(sessions: Session[]) {
const valid: Session[] = [];
const invalid: Session[] = [];
for (const session of sessions) {
const { valid: isValid, errors, expires } = await this.validateSession(session);
if (isValid) {
valid.push(session);
this.logger.log(
`Valid: ${session.username}, days: ${Math.floor((expires - Date.now()) / (24 * 60 * 60 * 1000))}`,
);
} else {
invalid.push(session);
this.logger.warn(`Invalid: ${session.username}, errors: ${errors.join(', ')}`);
}
}
if (invalid.length > 0) {
this.logger.warn(`Invalid sessions:\n${invalid.map((session) => session.username).join('\n')}`);
}
await delay(1000);
}
private async validateSession(session: Session) {
const errors: string[] = [];
let expires = Date.now();
if (!session) errors.push('Invalid session');
if (session.schemaVersion !== this.configService.getOrThrow<number>('session.schemaVersion')) {
errors.push('Outdated schema version');
}
if (!session.username) errors.push('Invalid username');
if (!session.password) errors.push('Invalid password');
if (!session.steamId) errors.push('Invalid steamId');
if (!session.hasOwnProperty('sharedSecret')) errors.push('Invalid shared Secret');
if (!session.hasOwnProperty('identitySecret')) errors.push('Invalid identity Secret');
if (session.desktopRefreshToken) {
if (!this.steamTokensService.validateRefreshToken(session.desktopRefreshToken)) {
errors.push('Invalid desktop refresh token');
}
const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.desktopRefreshToken);
if (tokenExpiration > expires) expires = tokenExpiration;
}
if (session.mobileRefreshToken) {
if (!this.steamTokensService.validateRefreshToken(session.mobileRefreshToken)) {
errors.push('Invalid mobile refresh token');
}
const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.mobileRefreshToken);
if (tokenExpiration > expires) expires = tokenExpiration;
}
| if (session.webRefreshToken) { |
if (!this.steamTokensService.validateRefreshToken(session.webRefreshToken)) {
errors.push('Invalid web refresh token');
}
const tokenExpiration = this.steamTokensService.getRefreshTokenExpiration(session.webRefreshToken);
if (tokenExpiration > expires) expires = tokenExpiration;
}
if (expires < Date.now()) errors.push('Expired session');
return { valid: errors.length === 0, errors, expires };
}
}
| src/commands/validate/validate-sessions.service.ts | Sadzurami-steam-sessions-creator-97b7294 | [
{
"filename": "src/commands/create/create-sessions.service.ts",
"retrieved_chunk": " password: account.password,\n sharedSecret: account.sharedSecret || null,\n identitySecret: account.identitySecret || null,\n steamId,\n webRefreshToken,\n mobileRefreshToken,\n desktopRefreshToken,\n schemaVersion,\n };\n return session;",
"score": 28.159520390391453
},
{
"filename": "src/commands/create/create-sessions.service.ts",
"retrieved_chunk": " const desktopRefreshToken = await this.createRefreshToken(account, 'desktop');\n await delay(delayMs);\n const webRefreshToken = await this.createRefreshToken(account, 'web');\n await delay(delayMs);\n const mobileRefreshToken = await this.createRefreshToken(account, 'mobile');\n await delay(delayMs);\n const steamId = this.getSteamIdFromRefreshToken(webRefreshToken);\n const schemaVersion = this.schemaVersion;\n const session: ISession = {\n username: account.username,",
"score": 25.576796973987445
},
{
"filename": "src/commands/create/create-sessions.service.ts",
"retrieved_chunk": " } catch (error) {\n throw new Error('Failed to create session', { cause: error });\n }\n }\n private async createRefreshToken(account: Account, platform: 'web' | 'mobile' | 'desktop') {\n try {\n return await pRetry(() => this.steamTokensService.createRefreshToken(account, platform), {\n retries: 3,\n minTimeout: 31000,\n maxTimeout: 31000,",
"score": 18.890196203061933
},
{
"filename": "src/commands/create/create-sessions.service.ts",
"retrieved_chunk": " });\n } catch (error) {\n throw new Error('Failed to create refresh token', { cause: error });\n }\n }\n private getSteamIdFromRefreshToken(token: string) {\n try {\n const { sub: steamId } = this.steamTokensService.decodeRefreshToken(token);\n if (!steamId) throw new Error('SteamId is missing from refresh token');\n return steamId;",
"score": 18.62754591180383
},
{
"filename": "src/interfaces/session.interface.ts",
"retrieved_chunk": "export interface Session {\n username: string;\n password: string;\n steamId: string;\n webRefreshToken: string;\n mobileRefreshToken: string;\n desktopRefreshToken: string;\n sharedSecret: string | null;\n identitySecret: string | null;\n schemaVersion: number;",
"score": 17.46164700074517
}
] | typescript | if (session.webRefreshToken) { |
import { Client } from "pg";
import { DB as DBConfig } from "../common/const";
import { Book } from "../common/types";
export default class BookModel {
private readonly client: Client;
constructor() {
this.client = new Client({
host: DBConfig.HOST,
user: DBConfig.USER,
password: DBConfig.PASSWORD,
database: DBConfig.DB_NAME,
port: DBConfig.PORT,
ssl: true
})
}
async init(): Promise<void> {
try {
await this.client.connect();
await this.client.query(`CREATE TABLE IF NOT EXISTS books (
id VARCHAR(255) UNIQUE NOT NULL,
userid VARCHAR(255) UNIQUE NOT NULL,
title VARCHAR(255) NOT NULL,
author VARCHAR(255) NOT NULL,
signature VARCHAR(255) NOT NULL,
path VARCHAR(255) NOT NULL,
cover VARCHAR(255) NOT NULL
)
`);
} catch (error) {
throw error;
}
}
async bookExists(bookid: string): Promise<boolean> {
const result = await this.client.query("SELECT EXISTS (SELECT 1 FROM books WHERE id = $1)", [bookid])
return result.rows[0].exists
}
async getBooks(): Promise<Array<Book> | null> {
try {
let response = await this.client.query("SELECT * FROM books");
return response.rows;
} catch (error) {
console.error(error);
return;
}
}
async pushBook(book: Book): Promise<Book | null> {
try {
await this.client.query(`
INSERT INTO books (id, userid, author, title, path, cover, signature)
VALUES ($1, $2, $3, $4, $5, $6, $7)`,
[book.id, book.userid | , book.author, book.title, book.path, book?.cover ?? "", book.signature]
)
return book; |
} catch (error) {
console.error(error);
return null;
}
}
async deleteBook(bookid: string, userid?: string) {
try {
await this.client.query(`DELETE FROM books WHERE id = $1 ${userid && "AND userid = $2"}`, [bookid, userid ?? ""]);
return bookid;
} catch (error) {
console.error(error);
return null;
}
}
async getBook(bookid: string, sig?: string): Promise<Book | null> {
try {
const response = await this.client.query(`SELECT * FROM books WHERE ${bookid ? "id = $1" : "signature = $1"}`, [bookid || sig]);
return response.rows[0];
} catch (error) {
console.error(error);
return null;
}
}
async updateBook() { /* TODO */ }
async close(): Promise<void> {
await this.client.end();
}
}
| src/models/BookModel.ts | Aadv1k-quillia-52c5b34 | [
{
"filename": "src/common/types.ts",
"retrieved_chunk": "}\nexport interface Book {\n id: string,\n userid: string,\n title: string,\n author: string,\n path: string,\n signature: string\n cover?: string,\n}",
"score": 58.32371995230327
},
{
"filename": "src/routes/Books.ts",
"retrieved_chunk": " let epubEntry: Book = {\n id: epubID,\n userid: parsedAuthToken.id,\n title: epub.metadata?.title ?? epubID.split(\"-\").pop(),\n author: epub.metadata?.creator ?? parsedAuthToken.email,\n path: epubFilePermalink,\n signature: epubSignature,\n cover: epubCoverPermalink,\n };\n const pushed = await BOOK_DB.pushBook(epubEntry);",
"score": 53.25527503889214
},
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " sendJsonResponse(\n res,\n {\n error: null,\n message: `successfully created a new issue of id ${issueEntry.id}`,\n data: {\n id: pushed.id,\n borrower: pushed.borrowerid,\n lender: pushed.lenderid,\n book: foundBook.title,",
"score": 42.24740677473597
},
{
"filename": "src/routes/Books.ts",
"retrieved_chunk": " }\n if (!data.bookid) {\n sendJsonResponse(res, ERROR.badRequest, 400);\n return;\n }\n let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id);\n if (!bookDeleted) {\n sendJsonResponse(res, {\n error: \"unable-to-delete-book\",\n message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`,",
"score": 39.99559549621636
},
{
"filename": "src/routes/Books.ts",
"retrieved_chunk": " if (!pushed) {\n sendJsonResponse(res, ERROR.internalErr, 500);\n return;\n }\n sendJsonResponse(\n res,\n {\n error: null,\n message: `successfully published a book of id ${epubEntry.id}`,\n data: {",
"score": 37.09073436068709
}
] | typescript | , book.author, book.title, book.path, book?.cover ?? "", book.signature]
)
return book; |
import { Client } from "pg";
import { DB as DBConfig } from "../common/const";
import { User } from "../common/types";
export default class UserModel {
private readonly client: Client;
constructor() {
this.client = new Client({
host: DBConfig.HOST,
user: DBConfig.USER,
password: DBConfig.PASSWORD,
database: DBConfig.DB_NAME,
port: DBConfig.PORT,
ssl: true
})
}
async init(): Promise<void> {
try {
await this.client.connect();
await this.client.query(`
CREATE TABLE IF NOT EXISTS users (
id VARCHAR(255) UNIQUE NOT NULL,
email VARCHAR(255) NOT NULL,
password VARCHAR(255) NOT NULL
)
`);
} catch (error) {
throw error
}
}
async userExists(email?: string): Promise<boolean> {
const result = await this.client.query("SELECT EXISTS (SELECT 1 FROM users WHERE email = $1)", [email])
return result.rows[0].exists
}
async getUserByID(id: string): Promise<User | null> {
try {
const response = await this.client.query(
`SELECT * FROM users
WHERE id = $1`,
[id]
);
return response.rows[0]
} catch (error) {
return null;
}
}
async getUser(email: string, id?: string): Promise<User | null> {
try {
const response = await this.client.query(
`SELECT * FROM users
WHERE email = $1
OR id = $2`,
[email, id ?? ""]
);
return response.rows[0]
} catch (error) {
return null;
}
}
async getUsers(): Promise <Array<object> | null> {
try {
let data = await this.client.query(`SELECT * FROM users`)
return data.rows;
} catch (error) {
console.error(error);
return null;
}
}
async updateUser(user: User, toUpdate: object ) {
let blobs = Object.keys(toUpdate).map((e, i) => {return `${e} = \$${i+1}`})
let toUpdateQuery = blobs.join(" AND ");
let query = `UPDATE users SET ${toUpdateQuery} WHERE $1 OR $2`
try {
this.client.query(query, | [user.id, user.email]); |
} catch (error) {
console.error(error);
return null;
}
}
async deleteUser(user: User): Promise<User | void> {
try {
await this.client.query(`DELETE FROM users WHERE id = $1 OR email = $2`, [user.id, user.email]);
return user;
} catch (error) {
throw error;
}
}
async pushUser(user: User): Promise<User | void> {
try {
await this.client.query("INSERT INTO users (id, email, password) VALUES ($1, $2, $3)", [user.id, user.email, user.password]);
return user;
} catch (error) {
throw error;
}
}
async pushTokenForUser(token: string, userid: string): Promise<void | null> {
try {
await this.client.query("INSERT INTO tokens (userid, token) VALUES ($1, $2)", [userid, token]);
} catch (error) {
console.error(error);
return null;
}
}
async deleteTokenForUser(token?: string, userid?: string): Promise<void | null> {
try {
await this.client.query("DELETE FROM tokens WHERE token = $1 OR userid = $2", [token, userid]);
} catch (error) {
console.error(error);
return null;
}
}
async close(): Promise<void> {
await this.client.end();
}
}
| src/models/UserModel.ts | Aadv1k-quillia-52c5b34 | [
{
"filename": "src/models/BookModel.ts",
"retrieved_chunk": " await this.client.query(`DELETE FROM books WHERE id = $1 ${userid && \"AND userid = $2\"}`, [bookid, userid ?? \"\"]);\n return bookid;\n } catch (error) {\n console.error(error); \n return null;\n }\n }\n async getBook(bookid: string, sig?: string): Promise<Book | null> {\n try {\n const response = await this.client.query(`SELECT * FROM books WHERE ${bookid ? \"id = $1\" : \"signature = $1\"}`, [bookid || sig]);",
"score": 31.862470247536592
},
{
"filename": "src/models/IssueModel.ts",
"retrieved_chunk": " try {\n await this.client.query(\n \"DELETE FROM issues WHERE issueid = $1 OR borrowerid = $2 OR lenderid = $3\",\n [issueid ?? \"\", borrowerid ?? \"\", lenderid ?? \"\"]\n );\n } catch (error) {\n console.error(error);\n return null;\n }\n }",
"score": 30.968502368860936
},
{
"filename": "src/models/BookModel.ts",
"retrieved_chunk": " }\n }\n async bookExists(bookid: string): Promise<boolean> {\n const result = await this.client.query(\"SELECT EXISTS (SELECT 1 FROM books WHERE id = $1)\", [bookid])\n return result.rows[0].exists\n } \n async getBooks(): Promise<Array<Book> | null> {\n try {\n let response = await this.client.query(\"SELECT * FROM books\");\n return response.rows;",
"score": 27.08680290197727
},
{
"filename": "src/models/IssueModel.ts",
"retrieved_chunk": " const result = await this.client.query(\"SELECT EXISTS (SELECT 1 FROM issues WHERE id = $1)\", [issueid])\n return result.rows[0].exists\n } \n async pushIssue(data: Issue): Promise<Issue | null> {\n try {\n await this.client.query(\n \"INSERT INTO issues (id, lenderid, borrowerid, bookid) VALUES ($1, $2, $3, $4)\",\n [data.id, data.lenderid, data.borrowerid, data.bookid]\n );\n return data;",
"score": 27.052593414400363
},
{
"filename": "src/models/IssueModel.ts",
"retrieved_chunk": " async getIssues(borrowerid: string): Promise<Array<Issue> | null> {\n try {\n let response = await this.client.query(\n \"SELECT * FROM issues WHERE borrowerid = $1\",\n [borrowerid]\n );\n return response.rows;\n } catch (error) {\n console.error(error);\n return null;",
"score": 23.703192292824603
}
] | typescript | [user.id, user.email]); |
import { Client } from "pg";
import { DB as DBConfig } from "../common/const";
import { User } from "../common/types";
export default class UserModel {
private readonly client: Client;
constructor() {
this.client = new Client({
host: DBConfig.HOST,
user: DBConfig.USER,
password: DBConfig.PASSWORD,
database: DBConfig.DB_NAME,
port: DBConfig.PORT,
ssl: true
})
}
async init(): Promise<void> {
try {
await this.client.connect();
await this.client.query(`
CREATE TABLE IF NOT EXISTS users (
id VARCHAR(255) UNIQUE NOT NULL,
email VARCHAR(255) NOT NULL,
password VARCHAR(255) NOT NULL
)
`);
} catch (error) {
throw error
}
}
async userExists(email?: string): Promise<boolean> {
const result = await this.client.query("SELECT EXISTS (SELECT 1 FROM users WHERE email = $1)", [email])
return result.rows[0].exists
}
async getUserByID(id: string): Promise<User | null> {
try {
const response = await this.client.query(
`SELECT * FROM users
WHERE id = $1`,
[id]
);
return response.rows[0]
} catch (error) {
return null;
}
}
async getUser(email: string, id?: string): Promise<User | null> {
try {
const response = await this.client.query(
`SELECT * FROM users
WHERE email = $1
OR id = $2`,
[email, id ?? ""]
);
return response.rows[0]
} catch (error) {
return null;
}
}
async getUsers(): Promise <Array<object> | null> {
try {
let data = await this.client.query(`SELECT * FROM users`)
return data.rows;
} catch (error) {
console.error(error);
return null;
}
}
async updateUser(user: User, toUpdate: object ) {
let blobs = Object.keys(toUpdate).map((e, i) => {return `${e} = \$${i+1}`})
let toUpdateQuery = blobs.join(" AND ");
let query = `UPDATE users SET ${toUpdateQuery} WHERE $1 OR $2`
try {
| this.client.query(query, [user.id, user.email]); |
} catch (error) {
console.error(error);
return null;
}
}
async deleteUser(user: User): Promise<User | void> {
try {
await this.client.query(`DELETE FROM users WHERE id = $1 OR email = $2`, [user.id, user.email]);
return user;
} catch (error) {
throw error;
}
}
async pushUser(user: User): Promise<User | void> {
try {
await this.client.query("INSERT INTO users (id, email, password) VALUES ($1, $2, $3)", [user.id, user.email, user.password]);
return user;
} catch (error) {
throw error;
}
}
async pushTokenForUser(token: string, userid: string): Promise<void | null> {
try {
await this.client.query("INSERT INTO tokens (userid, token) VALUES ($1, $2)", [userid, token]);
} catch (error) {
console.error(error);
return null;
}
}
async deleteTokenForUser(token?: string, userid?: string): Promise<void | null> {
try {
await this.client.query("DELETE FROM tokens WHERE token = $1 OR userid = $2", [token, userid]);
} catch (error) {
console.error(error);
return null;
}
}
async close(): Promise<void> {
await this.client.end();
}
}
| src/models/UserModel.ts | Aadv1k-quillia-52c5b34 | [
{
"filename": "src/models/BookModel.ts",
"retrieved_chunk": " await this.client.query(`DELETE FROM books WHERE id = $1 ${userid && \"AND userid = $2\"}`, [bookid, userid ?? \"\"]);\n return bookid;\n } catch (error) {\n console.error(error); \n return null;\n }\n }\n async getBook(bookid: string, sig?: string): Promise<Book | null> {\n try {\n const response = await this.client.query(`SELECT * FROM books WHERE ${bookid ? \"id = $1\" : \"signature = $1\"}`, [bookid || sig]);",
"score": 34.59740086170254
},
{
"filename": "src/models/IssueModel.ts",
"retrieved_chunk": " try {\n await this.client.query(\n \"DELETE FROM issues WHERE issueid = $1 OR borrowerid = $2 OR lenderid = $3\",\n [issueid ?? \"\", borrowerid ?? \"\", lenderid ?? \"\"]\n );\n } catch (error) {\n console.error(error);\n return null;\n }\n }",
"score": 34.50035760910686
},
{
"filename": "src/models/BookModel.ts",
"retrieved_chunk": " }\n }\n async bookExists(bookid: string): Promise<boolean> {\n const result = await this.client.query(\"SELECT EXISTS (SELECT 1 FROM books WHERE id = $1)\", [bookid])\n return result.rows[0].exists\n } \n async getBooks(): Promise<Array<Book> | null> {\n try {\n let response = await this.client.query(\"SELECT * FROM books\");\n return response.rows;",
"score": 27.08680290197727
},
{
"filename": "src/models/IssueModel.ts",
"retrieved_chunk": " const result = await this.client.query(\"SELECT EXISTS (SELECT 1 FROM issues WHERE id = $1)\", [issueid])\n return result.rows[0].exists\n } \n async pushIssue(data: Issue): Promise<Issue | null> {\n try {\n await this.client.query(\n \"INSERT INTO issues (id, lenderid, borrowerid, bookid) VALUES ($1, $2, $3, $4)\",\n [data.id, data.lenderid, data.borrowerid, data.bookid]\n );\n return data;",
"score": 27.052593414400363
},
{
"filename": "src/models/IssueModel.ts",
"retrieved_chunk": " async getIssues(borrowerid: string): Promise<Array<Issue> | null> {\n try {\n let response = await this.client.query(\n \"SELECT * FROM issues WHERE borrowerid = $1\",\n [borrowerid]\n );\n return response.rows;\n } catch (error) {\n console.error(error);\n return null;",
"score": 27.037660396797442
}
] | typescript | this.client.query(query, [user.id, user.email]); |
import fs from 'fs/promises';
import inquirer from 'inquirer';
import pQueue from 'p-queue';
import { setTimeout as delay } from 'timers/promises';
import { Injectable, Logger } from '@nestjs/common';
import { Account as IAccount } from '../../interfaces/account.interface';
import { Secrets } from '../../interfaces/secrets.interface';
class Account implements IAccount {
public readonly username: string;
public readonly password: string;
public sharedSecret: string | null = null;
public identitySecret: string | null = null;
constructor(account: string) {
account = account.trim();
if (account.length === 0) throw new Error('Invalid account');
const parts = account.split(':').map((part) => part.trim());
if (parts.length < 2) throw new Error('Invalid account');
const [username, password, sharedSecret, identitySecret] = parts;
this.username = username;
this.password = password;
if (sharedSecret) this.sharedSecret = sharedSecret;
if (identitySecret) this.identitySecret = identitySecret;
}
}
@Injectable()
export class AccountsImportService {
private readonly logger = new Logger(AccountsImportService.name);
private readonly readFilesQueue = new pQueue({ concurrency: 100 });
public async loadAccounts(input: string[] | string) {
if (!input) return [];
if (!Array.isArray(input)) input = [input];
if (input.length === 0) return [];
let accounts: Account[] = [];
const errors: string[] = [];
const readResults = await Promise.all(input.map((input) => this.readAccountsFromInput(input)));
for (const result of readResults) {
accounts.push(...result.values);
errors.push(...result.errors);
}
accounts = this.removeDuplicates(accounts);
if (errors.length > 0 && accounts.length > 0) {
this.logger.warn(`The following account sources are invalid:\n${errors.join('\n')}`);
await delay(1000);
const { confirm } = await inquirer.prompt({
type: 'confirm',
name: 'confirm',
message: 'Continue with the valid accounts?',
default: false,
});
if (!confirm) throw new Error('Aborted by user');
}
return accounts;
}
public assignSecretsToAccounts(accounts: Account[], secrets: Secrets[]) {
const secretsMap = new Map<string, Secrets>();
for (const secret of secrets) {
secretsMap.set(secret.username, secret);
// some existing steam-oriented apps are case-insensitive to usernames in secrets
secretsMap.set(secret.username.toLowerCase(), secret);
}
for (const account of accounts) {
let secret = secretsMap.get(account.username);
if (!secret) secret = secretsMap.get(account.username.toLowerCase());
if (!secret) continue;
account.sharedSecret = secret.sharedSecret;
| account.identitySecret = secret.identitySecret; |
}
}
private removeDuplicates(accounts: Account[]) {
const map = new Map<string, Account>();
for (const account of accounts) map.set(account.username, account);
return [...map.values()];
}
private async readAccountsFromInput(input: string) {
const inputType = await this.inferInputType(input);
if (inputType === 'file') return this.readAccountsFromFile(input);
if (inputType === 'string') return this.readAccountFromString(input);
if (inputType === 'directory') return { values: [], errors: [input] };
}
private async readAccountsFromFile(filePath: string) {
const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };
try {
let content = await this.readFilesQueue.add(() => fs.readFile(filePath, 'utf-8'));
content = content.trim();
if (content.length === 0) throw new Error('Empty file');
// session file
if (filePath.endsWith('.steamsession')) {
const readResults = this.readAccountFromSessionFile(content);
result.values.push(...readResults.values);
if (readResults.errors.length > 0) result.errors.push(filePath);
return result;
}
// asf json
if (filePath.endsWith('.json') && content.includes('"SteamLogin"')) {
const readResults = this.readAccountFromAsfJson(content);
result.values.push(...readResults.values);
if (readResults.errors.length > 0) result.errors.push(filePath);
return result;
}
// plain text
if (content.includes(':')) {
const lines = content
.split(/\s+|\r?\n/)
.map((l) => l.trim())
.filter((l) => l.length > 0);
if (lines.length === 0) throw new Error('Empty file');
for (const line of lines) {
const readResults = this.readAccountFromString(line);
result.values.push(...readResults.values);
if (readResults.errors.length > 0) result.errors.push(line);
}
return result;
}
throw new Error('Unsupported file format');
} catch (error) {
result.errors.push(filePath);
}
return result;
}
private readAccountFromString(str: string) {
const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };
try {
const account = new Account(str);
result.values.push(account);
} catch (error) {
result.errors.push(str);
}
return result;
}
private readAccountFromAsfJson(fileContent: string) {
const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };
try {
const { SteamLogin: username, SteamPassword: password } = JSON.parse(fileContent);
if (!username) throw new Error('Invalid username');
if (!password) throw new Error('Invalid password');
const account = new Account(`${username}:${password}`);
result.values.push(account);
} catch (error) {
result.errors.push(fileContent);
}
return result;
}
private readAccountFromSessionFile(fileContent: string) {
const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };
try {
const { Username, Password, SharedSecret, IdentitySecret } = JSON.parse(fileContent);
if (!Username) throw new Error('Invalid username');
if (!Password) throw new Error('Invalid password');
const account = new Account(`${Username}:${Password}:${SharedSecret || ''}:${IdentitySecret || ''}`);
result.values.push(account);
} catch (error) {
result.errors.push(fileContent);
}
return result;
}
private async inferInputType(input: string) {
if (typeof input !== 'string') throw new Error(`Invalid input type: '${typeof input}'`);
try {
const stats = await fs.stat(input);
if (stats.isFile()) return 'file';
if (stats.isDirectory()) return 'directory';
} catch (error) {
return 'string';
}
}
}
| src/modules/accounts-import/accounts-import.service.ts | Sadzurami-steam-sessions-creator-97b7294 | [
{
"filename": "src/modules/secrets-import/secrets-import.service.ts",
"retrieved_chunk": " default: false,\n });\n if (!confirm) throw new Error('Aborted by user');\n }\n return secrets;\n }\n private removeDuplicates(secrets: Secrets[]) {\n const map = new Map<string, Secrets>();\n for (const secret of secrets) map.set(secret.username, secret);\n return [...map.values()];",
"score": 76.15985402112503
},
{
"filename": "src/modules/secrets-import/secrets-import.service.ts",
"retrieved_chunk": " public readonly identitySecret: string;\n constructor(secrets: string) {\n let parsedSecrets: any;\n try {\n parsedSecrets = JSON.parse(secrets);\n } catch (error) {}\n if (typeof parsedSecrets !== 'object' || parsedSecrets === null) throw new Error('Secrets string is invalid');\n const { shared_secret, identity_secret, account_name } = parsedSecrets;\n if (!shared_secret) throw new Error('Shared secret is missing');\n this.sharedSecret = shared_secret;",
"score": 43.630069201243515
},
{
"filename": "src/modules/secrets-import/secrets-import.service.ts",
"retrieved_chunk": " if (!identity_secret) throw new Error('Identity secret is missing');\n this.identitySecret = identity_secret;\n if (!account_name) throw new Error('Account name is missing');\n this.username = account_name;\n }\n}\n@Injectable()\nexport class SecretsImportService {\n private readonly logger = new Logger(SecretsImportService.name);\n private readonly readFilesQueue = new pQueue({ concurrency: 100 });",
"score": 42.18387332569848
},
{
"filename": "src/commands/create/create-sessions.service.ts",
"retrieved_chunk": " password: account.password,\n sharedSecret: account.sharedSecret || null,\n identitySecret: account.identitySecret || null,\n steamId,\n webRefreshToken,\n mobileRefreshToken,\n desktopRefreshToken,\n schemaVersion,\n };\n return session;",
"score": 38.64387971601955
},
{
"filename": "src/modules/secrets-import/secrets-import.service.ts",
"retrieved_chunk": " }\n secrets = this.removeDuplicates(secrets);\n if (errors.length > 0) {\n this.logger.warn(`The following secret sources are invalid:\\n${errors.join('\\n')}`);\n await delay(1000);\n const { confirm } = await inquirer.prompt({\n type: 'confirm',\n name: 'confirm',\n message:\n secrets.length > 0 ? `Continue with ${secrets.length} valid secrets?` : 'Continue without any secrets?',",
"score": 36.760459604708096
}
] | typescript | account.identitySecret = secret.identitySecret; |
import BookModel from "../models/BookModel";
import Bucket from "../models/Bucket";
import Token from "../lib/GenerateToken";
import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const";
import { TokStatus, Book } from "../common/types";
import {
sendJsonResponse,
parseSimplePostData,
md5,
uuid,
} from "../common/utils";
import filetype from "file-type-cjs";
import fs from "node:fs";
import EPub from "epub";
import os from "node:os";
import path from "node:path";
import crypto from "node:crypto";
import { exec } from "node:child_process";
import http from "node:http";
async function getEpubCoverFromEpubFile_UNIX(
epubFilepath: string
): Promise<[Buffer, string] | null> {
let randomString = crypto.randomBytes(16).toString("hex");
let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`);
fs.mkdirSync(tempDir);
let unzipCMD = `unzip -q ${epubFilepath} -d ${tempDir}`;
let unzipCMDExec = new Promise((resolve, reject) => {
exec(unzipCMD, (err: any, stdout: any, stderr: any) => {
if (err) reject(err);
resolve(stdout);
});
});
try {
await unzipCMDExec;
} catch (err) {
console.error(err);
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return null;
}
let findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`;
let findCMDExec: Promise<string> = new Promise((resolve, reject) => {
exec(findCMD, (err: any, stdout: any, stderr: any) => {
if (err) reject(err);
resolve(stdout);
});
});
let selectedFilePath: string;
try {
selectedFilePath = await findCMDExec;
selectedFilePath = selectedFilePath.trim();
} catch (err) {
console.error(err);
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return null;
}
let ret: [Buffer, string] = [
Buffer.from(fs.readFileSync(selectedFilePath)),
selectedFilePath,
];
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return ret;
}
export default async function (
req: http.IncomingMessage,
res: http.ServerResponse
) {
const BOOK_DB = new BookModel();
const BUCKET = new Bucket();
await BOOK_DB.init();
await BUCKET.init();
try {
if (req.method === "GET") {
try {
| let userBooks = await BOOK_DB.getBooks(); |
userBooks = userBooks.map((e) => {
delete e.path;
return e;
});
sendJsonResponse(res, userBooks, 200);
} catch (error) {
console.error(error);
sendJsonResponse(res, ERROR.internalErr);
}
} else if (req.method === "POST") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let epubBuffer: Buffer;
epubBuffer = await parseSimplePostData(req);
let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6);
let bufferMime = await filetype.fromBuffer(epubBuffer);
if (bufferMime.mime != "application/epub+zip") {
sendJsonResponse(res, ERROR.invalidMimeForResource, 415);
return;
}
if (epubSizeInMB > MAX_EPUB_SIZE_MB) {
sendJsonResponse(res, ERROR.fileTooLarge, 400);
return;
}
let randomString = crypto.randomBytes(16).toString("hex");
const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`);
fs.writeFileSync(tempEpubFilePath, epubBuffer);
const epub: any = await new Promise((resolve, reject) => {
const epub = new EPub(tempEpubFilePath);
epub.on("end", () => resolve(epub));
epub.on("error", reject);
epub.parse();
});
let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath);
console.log(epubCoverBuffer);
let epubSignature = md5(epubBuffer.toString("hex"));
let foundBook = await BOOK_DB.getBook("", epubSignature);
if (foundBook) {
sendJsonResponse(
res,
{
...ERROR.resourceExists,
data: {
id: foundBook.id,
},
},
409
);
return;
}
let epubFilePermalink = await BUCKET.pushBufferWithName(
epubBuffer,
`${epubSignature}.epub`
);
let epubCoverPermalink = null;
if (epubCoverBuffer) {
epubCoverPermalink = await BUCKET.pushBufferWithName(
epubCoverBuffer[0],
`${epubSignature}.${epubCoverBuffer[1].split(".").pop()}`
);
}
let epubID = uuid();
let epubEntry: Book = {
id: epubID,
userid: parsedAuthToken.id,
title: epub.metadata?.title ?? epubID.split("-").pop(),
author: epub.metadata?.creator ?? parsedAuthToken.email,
path: epubFilePermalink,
signature: epubSignature,
cover: epubCoverPermalink,
};
const pushed = await BOOK_DB.pushBook(epubEntry);
if (!pushed) {
sendJsonResponse(res, ERROR.internalErr, 500);
return;
}
sendJsonResponse(
res,
{
error: null,
message: `successfully published a book of id ${epubEntry.id}`,
data: {
id: epubEntry.id,
},
},
201
);
} else if (req.method === "DELETE") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let body: Buffer;
body = await parseSimplePostData(req);
let data: any;
try {
data = JSON.parse(body.toString());
} catch {
sendJsonResponse(res, ERROR.invalidJSONData, 400);
return;
}
if (!data.bookid) {
sendJsonResponse(res, ERROR.badRequest, 400);
return;
}
let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id);
if (!bookDeleted) {
sendJsonResponse(res, {
error: "unable-to-delete-book",
message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`,
status: 404
}, 404)
return;
}
sendJsonResponse(res, {
error: null,
message: `successfully deleted book of id ${data.bookid}`,
status: 204,
data: {
id: data.bookid,
}
}, 204)
}
} finally {
await BOOK_DB.close();
}
}
| src/routes/Books.ts | Aadv1k-quillia-52c5b34 | [
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " return;\n }\n await ISSUE_DB.init();\n await BOOK_DB.init();\n await USER_DB.init();\n const parsedAuthToken: any = token.UNSAFE_parse(authToken);\n if (req.method === \"GET\") {\n let URLParams = req.url.split(\"/\").slice(3);\n let requestedBook = URLParams?.[0];\n if (requestedBook) {",
"score": 30.25416884415312
},
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " const BOOK_DB = new BookModel();\n const USER_DB = new UserModel();\n const authorization = req.headers?.authorization;\n const authToken = authorization?.split(\" \")?.pop()?.trim();\n try {\n if (req.method === \"OPTIONS\") {\n sendJsonResponse(res, {}, 200);\n return;\n }\n if (!authorization || !authToken) {",
"score": 25.945500337082365
},
{
"filename": "src/routes/Signup.ts",
"retrieved_chunk": ") {\n const DB = new UserModel();\n if (req.method !== \"POST\") {\n sendJsonResponse(res, ERROR.methodNotAllowed, 405);\n return;\n }\n let data: any = await parseSimplePostData(req);\n data = data.toString();\n let parsedData: User;\n try {",
"score": 15.795353531316795
},
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " let targetBook = await BOOK_DB.getBook(requestedBook);\n if (!targetBook) {\n sendJsonResponse(res, ERROR.resourceNotExists, 404);\n return;\n }\n let epubResourcePath = targetBook.path;\n const response: Array<Buffer> = await new Promise((resolve, reject) => {\n https.get(epubResourcePath, (res) => {\n let data: Array<Buffer> = [];\n res.on(\"data\", (d: Buffer) => data.push(d));",
"score": 15.179651967311507
},
{
"filename": "src/routes/Login.ts",
"retrieved_chunk": " const DB = new UserModel();\n let data: any = await parseSimplePostData(req);\n data = data.toString();\n if (req.method !== \"POST\") {\n sendJsonResponse(res, ERROR.methodNotAllowed, 405);\n return;\n }\n let parsedData: User;\n try {\n parsedData = JSON.parse(data);",
"score": 14.931280090808782
}
] | typescript | let userBooks = await BOOK_DB.getBooks(); |
import fs from 'fs/promises';
import inquirer from 'inquirer';
import { setTimeout as delay } from 'timers/promises';
import { Injectable, Logger } from '@nestjs/common';
import { Proxy as IProxy } from '../../interfaces/proxy.interface';
class Proxy implements IProxy {
public readonly host: string;
public readonly port: number;
public readonly protocol: string;
public readonly auth?: { username: string; password: string };
constructor(proxy: string) {
if (!/^(https?|socks5?):\/\/([-\w:@.^&]+)$/.test(proxy)) throw new Error('Invalid proxy');
const url = new URL(proxy);
this.host = url.hostname;
this.port = Number(url.port);
this.protocol = url.protocol.replace(/:$/, '');
if (url.username.length > 0 && url.password.length > 0) {
this.auth = { username: url.username, password: url.password };
}
}
public toString() {
return this.auth
? `${this.protocol}://${this.auth.username}:${this.auth.password}@${this.host}:${this.port}`
: `${this.protocol}://${this.host}:${this.port}`;
}
}
@Injectable()
export class ProxiesImportService {
private readonly logger = new Logger(ProxiesImportService.name);
public async loadProxies(input: string[] | string) {
if (!input) return [];
if (!Array.isArray(input)) input = [input];
if (input.length === 0) return [];
let proxies: Proxy[] = [];
const errors: string[] = [];
const readResults = await Promise.all(input.map((input) => this.readProxyFromInput(input)));
for (const result of readResults) {
proxies.push(...result.values);
errors.push(...result.errors);
}
proxies = this.removeDuplicates(proxies);
if (errors.length > 0) {
this.logger.warn(`The following proxy sources are invalid:\n${errors.join('\n')}`);
await delay(1000);
const { confirm } = await inquirer.prompt({
type: 'confirm',
name: 'confirm',
message: proxies.length > 0 ? `Continue with ${proxies.length} valid proxies ?` : 'Continue without proxies ?',
default: false,
});
if (!confirm) throw new Error('Aborted by user');
}
return proxies;
}
private removeDuplicates(proxies: Proxy[]) {
const map = new Map<string, Proxy>();
for (const proxy of proxies) map.set(proxy.toString(), proxy);
return [...map.values()];
}
private async readProxyFromInput(input: string) {
const inputType = await this.inferInputType(input);
if (inputType === 'file') return await this.readProxyFromFile(input);
if (inputType === 'string') return this.readProxyFromString(input);
if (inputType === 'directory') return { values: [], errors: [input] };
}
private readProxyFromString(str: string) {
const result: { values: Proxy[]; errors: string[] } = { values: [], errors: [] };
try {
| const proxy = new Proxy(str); |
result.values.push(proxy);
} catch (error) {
result.errors.push(str);
}
return result;
}
private async readProxyFromFile(path: string) {
const result: { values: Proxy[]; errors: string[] } = { values: [], errors: [] };
try {
const file = await fs.readFile(path, 'utf8');
const lines = file
.split(/\s+|\r?\n/)
.map((line) => line.trim())
.filter((line) => line.length > 0);
if (lines.length === 0) throw new Error(`File '${path}' is empty`);
for (const line of lines) {
const { values, errors } = this.readProxyFromString(line);
result.values.push(...values);
result.errors.push(...errors);
}
} catch (error) {
result.errors.push(path);
}
return result;
}
private async inferInputType(input: string) {
if (typeof input !== 'string') throw new Error(`Invalid input type: '${typeof input}'`);
try {
const stats = await fs.stat(input);
if (stats.isFile()) return 'file';
if (stats.isDirectory()) return 'directory';
} catch (error) {
return 'string';
}
}
}
| src/modules/proxies-import/proxies-import.service.ts | Sadzurami-steam-sessions-creator-97b7294 | [
{
"filename": "src/modules/secrets-import/secrets-import.service.ts",
"retrieved_chunk": " }\n private async readSecretsFromInput(input: string) {\n const inputType = await this.inferInputType(input);\n if (inputType === 'file') return this.readSecretsFromFile(input);\n if (inputType === 'string') return { values: [], errors: [input] };\n if (inputType === 'directory') return { values: [], errors: [input] };\n }\n private async readSecretsFromFile(filePath: string) {\n const result: { values: Secrets[]; errors: string[] } = { values: [], errors: [] };\n try {",
"score": 77.96939409824579
},
{
"filename": "src/modules/sessions-import/sessions-import.service.ts",
"retrieved_chunk": " private async readSessionsFromInput(input: string) {\n const inputType = await this.inferInputType(input);\n if (inputType === 'file') return this.readSessionFromFile(input);\n if (inputType === 'string') return { values: [], errors: [input] };\n if (inputType === 'directory') return { values: [], errors: [input] };\n }\n private async readSessionFromFile(filePath: string) {\n const result: { values: Session[]; errors: string[] } = { values: [], errors: [] };\n try {\n let content = await this.readFilesQueue.add(() => fs.readFile(filePath, 'utf-8'));",
"score": 74.0045730666672
},
{
"filename": "src/modules/accounts-import/accounts-import.service.ts",
"retrieved_chunk": " for (const account of accounts) map.set(account.username, account);\n return [...map.values()];\n }\n private async readAccountsFromInput(input: string) {\n const inputType = await this.inferInputType(input);\n if (inputType === 'file') return this.readAccountsFromFile(input);\n if (inputType === 'string') return this.readAccountFromString(input);\n if (inputType === 'directory') return { values: [], errors: [input] };\n }\n private async readAccountsFromFile(filePath: string) {",
"score": 68.71464396624228
},
{
"filename": "src/modules/secrets-import/secrets-import.service.ts",
"retrieved_chunk": " const secrets = new Secrets(content);\n result.values.push(secrets);\n } catch (error) {\n result.errors.push(filePath);\n }\n return result;\n }\n private async inferInputType(input: string) {\n if (typeof input !== 'string') throw new Error(`Invalid input type: '${typeof input}'`);\n try {",
"score": 41.46491014433549
},
{
"filename": "src/modules/sessions-import/sessions-import.service.ts",
"retrieved_chunk": " public async loadSessions(input: string[] | string) {\n if (!input) return [];\n if (!Array.isArray(input)) input = [input];\n if (input.length === 0) return [];\n let sessions: Session[] = [];\n const errors: string[] = [];\n const readResults = await Promise.all(input.map((input) => this.readSessionsFromInput(input)));\n for (const result of readResults) {\n sessions.push(...result.values);\n errors.push(...result.errors);",
"score": 40.948057967775796
}
] | typescript | const proxy = new Proxy(str); |
import BookModel from "../models/BookModel";
import Bucket from "../models/Bucket";
import Token from "../lib/GenerateToken";
import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const";
import { TokStatus, Book } from "../common/types";
import {
sendJsonResponse,
parseSimplePostData,
md5,
uuid,
} from "../common/utils";
import filetype from "file-type-cjs";
import fs from "node:fs";
import EPub from "epub";
import os from "node:os";
import path from "node:path";
import crypto from "node:crypto";
import { exec } from "node:child_process";
import http from "node:http";
async function getEpubCoverFromEpubFile_UNIX(
epubFilepath: string
): Promise<[Buffer, string] | null> {
let randomString = crypto.randomBytes(16).toString("hex");
let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`);
fs.mkdirSync(tempDir);
let unzipCMD = `unzip -q ${epubFilepath} -d ${tempDir}`;
let unzipCMDExec = new Promise((resolve, reject) => {
exec(unzipCMD, (err: any, stdout: any, stderr: any) => {
if (err) reject(err);
resolve(stdout);
});
});
try {
await unzipCMDExec;
} catch (err) {
console.error(err);
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return null;
}
let findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`;
let findCMDExec: Promise<string> = new Promise((resolve, reject) => {
exec(findCMD, (err: any, stdout: any, stderr: any) => {
if (err) reject(err);
resolve(stdout);
});
});
let selectedFilePath: string;
try {
selectedFilePath = await findCMDExec;
selectedFilePath = selectedFilePath.trim();
} catch (err) {
console.error(err);
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return null;
}
let ret: [Buffer, string] = [
Buffer.from(fs.readFileSync(selectedFilePath)),
selectedFilePath,
];
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return ret;
}
export default async function (
req: http.IncomingMessage,
res: http.ServerResponse
) {
const BOOK_DB = new BookModel();
const BUCKET = new Bucket();
await BOOK_DB.init();
await BUCKET.init();
try {
if (req.method === "GET") {
try {
let userBooks = await BOOK_DB.getBooks();
userBooks = userBooks.map((e) => {
delete e.path;
return e;
});
sendJsonResponse(res, userBooks, 200);
} catch (error) {
console.error(error);
| sendJsonResponse(res, ERROR.internalErr); |
}
} else if (req.method === "POST") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let epubBuffer: Buffer;
epubBuffer = await parseSimplePostData(req);
let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6);
let bufferMime = await filetype.fromBuffer(epubBuffer);
if (bufferMime.mime != "application/epub+zip") {
sendJsonResponse(res, ERROR.invalidMimeForResource, 415);
return;
}
if (epubSizeInMB > MAX_EPUB_SIZE_MB) {
sendJsonResponse(res, ERROR.fileTooLarge, 400);
return;
}
let randomString = crypto.randomBytes(16).toString("hex");
const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`);
fs.writeFileSync(tempEpubFilePath, epubBuffer);
const epub: any = await new Promise((resolve, reject) => {
const epub = new EPub(tempEpubFilePath);
epub.on("end", () => resolve(epub));
epub.on("error", reject);
epub.parse();
});
let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath);
console.log(epubCoverBuffer);
let epubSignature = md5(epubBuffer.toString("hex"));
let foundBook = await BOOK_DB.getBook("", epubSignature);
if (foundBook) {
sendJsonResponse(
res,
{
...ERROR.resourceExists,
data: {
id: foundBook.id,
},
},
409
);
return;
}
let epubFilePermalink = await BUCKET.pushBufferWithName(
epubBuffer,
`${epubSignature}.epub`
);
let epubCoverPermalink = null;
if (epubCoverBuffer) {
epubCoverPermalink = await BUCKET.pushBufferWithName(
epubCoverBuffer[0],
`${epubSignature}.${epubCoverBuffer[1].split(".").pop()}`
);
}
let epubID = uuid();
let epubEntry: Book = {
id: epubID,
userid: parsedAuthToken.id,
title: epub.metadata?.title ?? epubID.split("-").pop(),
author: epub.metadata?.creator ?? parsedAuthToken.email,
path: epubFilePermalink,
signature: epubSignature,
cover: epubCoverPermalink,
};
const pushed = await BOOK_DB.pushBook(epubEntry);
if (!pushed) {
sendJsonResponse(res, ERROR.internalErr, 500);
return;
}
sendJsonResponse(
res,
{
error: null,
message: `successfully published a book of id ${epubEntry.id}`,
data: {
id: epubEntry.id,
},
},
201
);
} else if (req.method === "DELETE") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let body: Buffer;
body = await parseSimplePostData(req);
let data: any;
try {
data = JSON.parse(body.toString());
} catch {
sendJsonResponse(res, ERROR.invalidJSONData, 400);
return;
}
if (!data.bookid) {
sendJsonResponse(res, ERROR.badRequest, 400);
return;
}
let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id);
if (!bookDeleted) {
sendJsonResponse(res, {
error: "unable-to-delete-book",
message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`,
status: 404
}, 404)
return;
}
sendJsonResponse(res, {
error: null,
message: `successfully deleted book of id ${data.bookid}`,
status: 204,
data: {
id: data.bookid,
}
}, 204)
}
} finally {
await BOOK_DB.close();
}
}
| src/routes/Books.ts | Aadv1k-quillia-52c5b34 | [
{
"filename": "src/models/UserModel.ts",
"retrieved_chunk": " let data = await this.client.query(`SELECT * FROM users`)\n return data.rows;\n } catch (error) {\n console.error(error);\n return null;\n }\n }\n async updateUser(user: User, toUpdate: object ) {\n let blobs = Object.keys(toUpdate).map((e, i) => {return `${e} = \\$${i+1}`})\n let toUpdateQuery = blobs.join(\" AND \");",
"score": 25.23003042717977
},
{
"filename": "src/routes/Signup.ts",
"retrieved_chunk": " sendJsonResponse(res, ERROR.internalErr, 500);\n }\n await DB.close();\n}",
"score": 13.768160053850377
},
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " let issueData: Issue;\n try {\n let issuePostData = await parseSimplePostData(req);\n issueData = JSON.parse(issuePostData.toString());\n } catch (error) {\n console.error(error);\n sendJsonResponse(res, ERROR.badRequest, 400);\n return;\n }\n if (!issueData.lenderid || !issueData.bookid) {",
"score": 12.921835140749408
},
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " sendJsonResponse(res, ERROR.badRequest, 400);\n return;\n }\n let foundLender = await USER_DB.getUserByID(issueData.lenderid);\n let foundBook = await BOOK_DB.getBook(issueData.bookid);\n if (!foundLender || !foundBook) {\n sendJsonResponse(res, ERROR.resourceNotExists, 404);\n return;\n }\n let foundIssue = await ISSUE_DB.getIssue(",
"score": 12.537334188913514
},
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " let targetBook = await BOOK_DB.getBook(requestedBook);\n if (!targetBook) {\n sendJsonResponse(res, ERROR.resourceNotExists, 404);\n return;\n }\n let epubResourcePath = targetBook.path;\n const response: Array<Buffer> = await new Promise((resolve, reject) => {\n https.get(epubResourcePath, (res) => {\n let data: Array<Buffer> = [];\n res.on(\"data\", (d: Buffer) => data.push(d));",
"score": 11.683129652628725
}
] | typescript | sendJsonResponse(res, ERROR.internalErr); |
import BookModel from "../models/BookModel";
import Bucket from "../models/Bucket";
import Token from "../lib/GenerateToken";
import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const";
import { TokStatus, Book } from "../common/types";
import {
sendJsonResponse,
parseSimplePostData,
md5,
uuid,
} from "../common/utils";
import filetype from "file-type-cjs";
import fs from "node:fs";
import EPub from "epub";
import os from "node:os";
import path from "node:path";
import crypto from "node:crypto";
import { exec } from "node:child_process";
import http from "node:http";
async function getEpubCoverFromEpubFile_UNIX(
epubFilepath: string
): Promise<[Buffer, string] | null> {
let randomString = crypto.randomBytes(16).toString("hex");
let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`);
fs.mkdirSync(tempDir);
let unzipCMD = `unzip -q ${epubFilepath} -d ${tempDir}`;
let unzipCMDExec = new Promise((resolve, reject) => {
exec(unzipCMD, (err: any, stdout: any, stderr: any) => {
if (err) reject(err);
resolve(stdout);
});
});
try {
await unzipCMDExec;
} catch (err) {
console.error(err);
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return null;
}
let findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`;
let findCMDExec: Promise<string> = new Promise((resolve, reject) => {
exec(findCMD, (err: any, stdout: any, stderr: any) => {
if (err) reject(err);
resolve(stdout);
});
});
let selectedFilePath: string;
try {
selectedFilePath = await findCMDExec;
selectedFilePath = selectedFilePath.trim();
} catch (err) {
console.error(err);
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return null;
}
let ret: [Buffer, string] = [
Buffer.from(fs.readFileSync(selectedFilePath)),
selectedFilePath,
];
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return ret;
}
export default async function (
req: http.IncomingMessage,
res: http.ServerResponse
) {
const BOOK_DB = new BookModel();
const BUCKET = new Bucket();
await BOOK_DB.init();
await BUCKET.init();
try {
if (req.method === "GET") {
try {
let userBooks = await BOOK_DB.getBooks();
userBooks = userBooks.map((e) => {
delete e.path;
return e;
});
sendJsonResponse(res, userBooks, 200);
} catch (error) {
console.error(error);
sendJsonResponse(res, ERROR.internalErr);
}
} else if (req.method === "POST") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === | TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) { |
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let epubBuffer: Buffer;
epubBuffer = await parseSimplePostData(req);
let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6);
let bufferMime = await filetype.fromBuffer(epubBuffer);
if (bufferMime.mime != "application/epub+zip") {
sendJsonResponse(res, ERROR.invalidMimeForResource, 415);
return;
}
if (epubSizeInMB > MAX_EPUB_SIZE_MB) {
sendJsonResponse(res, ERROR.fileTooLarge, 400);
return;
}
let randomString = crypto.randomBytes(16).toString("hex");
const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`);
fs.writeFileSync(tempEpubFilePath, epubBuffer);
const epub: any = await new Promise((resolve, reject) => {
const epub = new EPub(tempEpubFilePath);
epub.on("end", () => resolve(epub));
epub.on("error", reject);
epub.parse();
});
let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath);
console.log(epubCoverBuffer);
let epubSignature = md5(epubBuffer.toString("hex"));
let foundBook = await BOOK_DB.getBook("", epubSignature);
if (foundBook) {
sendJsonResponse(
res,
{
...ERROR.resourceExists,
data: {
id: foundBook.id,
},
},
409
);
return;
}
let epubFilePermalink = await BUCKET.pushBufferWithName(
epubBuffer,
`${epubSignature}.epub`
);
let epubCoverPermalink = null;
if (epubCoverBuffer) {
epubCoverPermalink = await BUCKET.pushBufferWithName(
epubCoverBuffer[0],
`${epubSignature}.${epubCoverBuffer[1].split(".").pop()}`
);
}
let epubID = uuid();
let epubEntry: Book = {
id: epubID,
userid: parsedAuthToken.id,
title: epub.metadata?.title ?? epubID.split("-").pop(),
author: epub.metadata?.creator ?? parsedAuthToken.email,
path: epubFilePermalink,
signature: epubSignature,
cover: epubCoverPermalink,
};
const pushed = await BOOK_DB.pushBook(epubEntry);
if (!pushed) {
sendJsonResponse(res, ERROR.internalErr, 500);
return;
}
sendJsonResponse(
res,
{
error: null,
message: `successfully published a book of id ${epubEntry.id}`,
data: {
id: epubEntry.id,
},
},
201
);
} else if (req.method === "DELETE") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let body: Buffer;
body = await parseSimplePostData(req);
let data: any;
try {
data = JSON.parse(body.toString());
} catch {
sendJsonResponse(res, ERROR.invalidJSONData, 400);
return;
}
if (!data.bookid) {
sendJsonResponse(res, ERROR.badRequest, 400);
return;
}
let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id);
if (!bookDeleted) {
sendJsonResponse(res, {
error: "unable-to-delete-book",
message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`,
status: 404
}, 404)
return;
}
sendJsonResponse(res, {
error: null,
message: `successfully deleted book of id ${data.bookid}`,
status: 204,
data: {
id: data.bookid,
}
}, 204)
}
} finally {
await BOOK_DB.close();
}
}
| src/routes/Books.ts | Aadv1k-quillia-52c5b34 | [
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " sendJsonResponse(res, ERROR.unauthorized, 401);\n return;\n }\n const token = new Token();\n const tokenStatus: TokStatus = token.verify(authToken);\n if (\n tokenStatus === TokStatus.INVALID ||\n tokenStatus === TokStatus.INVALID_SIG\n ) {\n sendJsonResponse(res, ERROR.unauthorized, 401);",
"score": 75.1693222289287
},
{
"filename": "src/routes/Login.ts",
"retrieved_chunk": " }\n if (md5(parsedData.password) !== foundUser.password) {\n sendJsonResponse(res, ERROR.unauthorized, 401);\n return;\n }\n const token = new Token();\n const { password, ...tokenBody} = foundUser;\n let accessToken = token.generate(tokenBody);\n sendJsonResponse(res, {\n messaged: \"found the given user\",",
"score": 22.136088751079942
},
{
"filename": "src/common/types.ts",
"retrieved_chunk": "export enum TokStatus {\n EXPIRED,\n INVALID,\n INVALID_SIG,\n VALID\n}",
"score": 22.120072665392215
},
{
"filename": "src/lib/GenerateToken.ts",
"retrieved_chunk": " }\n let b64Head = Buffer.from(JSON.stringify(head)).toString(\"base64\").replace(/=/g, \"\");\n let b64Body = Buffer.from(JSON.stringify(body)).toString(\"base64\").replace(/=/g, \"\");\n let signature = this.sign(`${b64Head}.${b64Body}`);\n return `${b64Head}.${b64Body}.${signature}`\n }\n verify(token: string): TokStatus {\n let [head, body, signature] = token.split('.');\n if (!head || !body || !signature) {\n return TokStatus.INVALID;",
"score": 20.583270722939126
},
{
"filename": "src/lib/GenerateToken.ts",
"retrieved_chunk": " }\n if (this.sign(`${head}.${body}`) !== signature) {\n return TokStatus.INVALID_SIG\n }\n let decodedBody = Buffer.from(body, \"base64\").toString(\"utf-8\");\n const curTime = Math.floor(Date.now() / 1000);\n if (JSON.parse(decodedBody)?.exp > curTime) {\n return TokStatus.EXPIRED;\n }\n return TokStatus.VALID",
"score": 17.525933545181623
}
] | typescript | TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) { |
import BookModel from "../models/BookModel";
import Bucket from "../models/Bucket";
import Token from "../lib/GenerateToken";
import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const";
import { TokStatus, Book } from "../common/types";
import {
sendJsonResponse,
parseSimplePostData,
md5,
uuid,
} from "../common/utils";
import filetype from "file-type-cjs";
import fs from "node:fs";
import EPub from "epub";
import os from "node:os";
import path from "node:path";
import crypto from "node:crypto";
import { exec } from "node:child_process";
import http from "node:http";
async function getEpubCoverFromEpubFile_UNIX(
epubFilepath: string
): Promise<[Buffer, string] | null> {
let randomString = crypto.randomBytes(16).toString("hex");
let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`);
fs.mkdirSync(tempDir);
let unzipCMD = `unzip -q ${epubFilepath} -d ${tempDir}`;
let unzipCMDExec = new Promise((resolve, reject) => {
exec(unzipCMD, (err: any, stdout: any, stderr: any) => {
if (err) reject(err);
resolve(stdout);
});
});
try {
await unzipCMDExec;
} catch (err) {
console.error(err);
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return null;
}
let findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`;
let findCMDExec: Promise<string> = new Promise((resolve, reject) => {
exec(findCMD, (err: any, stdout: any, stderr: any) => {
if (err) reject(err);
resolve(stdout);
});
});
let selectedFilePath: string;
try {
selectedFilePath = await findCMDExec;
selectedFilePath = selectedFilePath.trim();
} catch (err) {
console.error(err);
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return null;
}
let ret: [Buffer, string] = [
Buffer.from(fs.readFileSync(selectedFilePath)),
selectedFilePath,
];
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return ret;
}
export default async function (
req: http.IncomingMessage,
res: http.ServerResponse
) {
const BOOK_DB = new BookModel();
const BUCKET = new Bucket();
await BOOK_DB.init();
| await BUCKET.init(); |
try {
if (req.method === "GET") {
try {
let userBooks = await BOOK_DB.getBooks();
userBooks = userBooks.map((e) => {
delete e.path;
return e;
});
sendJsonResponse(res, userBooks, 200);
} catch (error) {
console.error(error);
sendJsonResponse(res, ERROR.internalErr);
}
} else if (req.method === "POST") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let epubBuffer: Buffer;
epubBuffer = await parseSimplePostData(req);
let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6);
let bufferMime = await filetype.fromBuffer(epubBuffer);
if (bufferMime.mime != "application/epub+zip") {
sendJsonResponse(res, ERROR.invalidMimeForResource, 415);
return;
}
if (epubSizeInMB > MAX_EPUB_SIZE_MB) {
sendJsonResponse(res, ERROR.fileTooLarge, 400);
return;
}
let randomString = crypto.randomBytes(16).toString("hex");
const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`);
fs.writeFileSync(tempEpubFilePath, epubBuffer);
const epub: any = await new Promise((resolve, reject) => {
const epub = new EPub(tempEpubFilePath);
epub.on("end", () => resolve(epub));
epub.on("error", reject);
epub.parse();
});
let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath);
console.log(epubCoverBuffer);
let epubSignature = md5(epubBuffer.toString("hex"));
let foundBook = await BOOK_DB.getBook("", epubSignature);
if (foundBook) {
sendJsonResponse(
res,
{
...ERROR.resourceExists,
data: {
id: foundBook.id,
},
},
409
);
return;
}
let epubFilePermalink = await BUCKET.pushBufferWithName(
epubBuffer,
`${epubSignature}.epub`
);
let epubCoverPermalink = null;
if (epubCoverBuffer) {
epubCoverPermalink = await BUCKET.pushBufferWithName(
epubCoverBuffer[0],
`${epubSignature}.${epubCoverBuffer[1].split(".").pop()}`
);
}
let epubID = uuid();
let epubEntry: Book = {
id: epubID,
userid: parsedAuthToken.id,
title: epub.metadata?.title ?? epubID.split("-").pop(),
author: epub.metadata?.creator ?? parsedAuthToken.email,
path: epubFilePermalink,
signature: epubSignature,
cover: epubCoverPermalink,
};
const pushed = await BOOK_DB.pushBook(epubEntry);
if (!pushed) {
sendJsonResponse(res, ERROR.internalErr, 500);
return;
}
sendJsonResponse(
res,
{
error: null,
message: `successfully published a book of id ${epubEntry.id}`,
data: {
id: epubEntry.id,
},
},
201
);
} else if (req.method === "DELETE") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let body: Buffer;
body = await parseSimplePostData(req);
let data: any;
try {
data = JSON.parse(body.toString());
} catch {
sendJsonResponse(res, ERROR.invalidJSONData, 400);
return;
}
if (!data.bookid) {
sendJsonResponse(res, ERROR.badRequest, 400);
return;
}
let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id);
if (!bookDeleted) {
sendJsonResponse(res, {
error: "unable-to-delete-book",
message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`,
status: 404
}, 404)
return;
}
sendJsonResponse(res, {
error: null,
message: `successfully deleted book of id ${data.bookid}`,
status: 204,
data: {
id: data.bookid,
}
}, 204)
}
} finally {
await BOOK_DB.close();
}
}
| src/routes/Books.ts | Aadv1k-quillia-52c5b34 | [
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " uuid,\n getBufferFromRawURL,\n} from \"../common/utils\";\nimport http from \"node:http\";\nimport https from \"node:https\";\nexport default async function (\n req: http.IncomingMessage,\n res: http.ServerResponse\n) {\n const ISSUE_DB = new IssueModel();",
"score": 24.80824831581957
},
{
"filename": "src/routes/Login.ts",
"retrieved_chunk": "import http from \"node:http\";\nimport { sendJsonResponse, md5, parseSimplePostData } from \"../common/utils\";\nimport Token from \"../lib/GenerateToken\";\nimport { ERROR } from \"../common/const\";\nimport UserModel from \"../models/UserModel\";\nimport { User } from \"../common/types\";\nexport default async function (\n req: http.IncomingMessage,\n res: http.ServerResponse\n) {",
"score": 19.15940983223921
},
{
"filename": "src/server.ts",
"retrieved_chunk": "import http from \"node:http\";\nimport { sendPublicFile } from \"./common/utils\";\nimport RouteSignup from \"./routes/Signup\"\nimport RouteLogin from \"./routes/Login\";\nimport RouteBooks from \"./routes/Books\";\nimport RouteIssue from \"./routes/Issue\";\nexport default http.createServer( async (req: http.IncomingMessage, res: http.ServerResponse) => {\n const url: string = new URL(`https://foo.com${req.url}`).pathname;\n if (url === \"/\") {\n sendPublicFile(res, \"index.html\");",
"score": 18.69101416035368
},
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " return;\n }\n await ISSUE_DB.init();\n await BOOK_DB.init();\n await USER_DB.init();\n const parsedAuthToken: any = token.UNSAFE_parse(authToken);\n if (req.method === \"GET\") {\n let URLParams = req.url.split(\"/\").slice(3);\n let requestedBook = URLParams?.[0];\n if (requestedBook) {",
"score": 18.505093238921248
},
{
"filename": "src/routes/Signup.ts",
"retrieved_chunk": "import http from \"node:http\";\nimport { sendJsonResponse, md5, uuid, parseSimplePostData } from \"../common/utils\";\nimport { ERROR } from \"../common/const\";\nimport { User } from \"../common/types\";\nimport UserModel from \"../models/UserModel\";\nimport Token from \"../lib/GenerateToken\";\nimport isEmailValid from \"../lib/isEmailValid\";\nexport default async function (\n req: http.IncomingMessage,\n res: http.ServerResponse",
"score": 18.06332426978546
}
] | typescript | await BUCKET.init(); |
import { Cache } from 'cache-manager';
import pEvent from 'p-event';
import { EAuthTokenPlatformType, EResult, LoginSession } from 'steam-session';
import SteamTotp from 'steam-totp';
import { CACHE_MANAGER } from '@nestjs/cache-manager';
import { Inject, Injectable } from '@nestjs/common';
import { Account } from '../../interfaces/account.interface';
import { ProxiesService } from '../proxies/proxies.service';
@Injectable()
export class SteamTokensService {
private readonly connectionThrottlingTimeout = 31 * 1000;
constructor(
@Inject(CACHE_MANAGER) private throttledConnections: Cache,
private readonly proxiesService: ProxiesService,
) {}
public async createRefreshToken(account: Account, platform: 'web' | 'mobile' | 'desktop') {
const loginSessionPlatform = this.inferLoginSessionPlatform(platform);
const proxy = await this.proxiesService.getProxy();
const connectionId = this.inferConnectionId((proxy || '').toString());
await this.waitConnectionLimitReset(connectionId);
this.throttleConnection(connectionId, this.connectionThrottlingTimeout);
const loginSessionOptions = {};
if (proxy) loginSessionOptions[proxy.protocol.includes('socks') ? 'socksProxy' : 'httpProxy'] = proxy.toString();
const loginSession = new LoginSession(loginSessionPlatform, loginSessionOptions);
loginSession.on('error', () => {}); // fallback errors handling
try {
const credentials = { accountName: account.username, password: account.password } as any;
if (account.sharedSecret) credentials.steamGuardCode = SteamTotp.getAuthCode(account.sharedSecret);
loginSession
.startWithCredentials(credentials)
.then((result) => result.actionRequired && loginSession.emit('error', new Error('Guard action required')))
.catch((error) => loginSession.emit('error', error));
await pEvent(loginSession, 'authenticated', { rejectionEvents: ['error', 'timeout'], timeout: 35000 });
const refreshToken = loginSession.refreshToken;
if (!refreshToken) throw new Error('Refresh token is empty');
return refreshToken;
} catch (error) {
if (error.eresult === EResult.RateLimitExceeded) this.throttleConnection(connectionId, 31 * 60 * 1000);
throw new Error('Failed to create refresh token', { cause: error });
} finally {
if (loginSession) loginSession.cancelLoginAttempt();
}
}
public decodeRefreshToken(token: string) {
try {
const parts = token.split('.');
if (parts.length !== 3) throw new Error('Invalid token');
const headerBase64Url = parts[1];
const headerBase64 = headerBase64Url.replace(/-/g, '+').replace(/_/g, '/');
const headerJson = Buffer.from(headerBase64, 'base64').toString('utf-8');
return JSON.parse(headerJson);
} catch (error) {
throw new Error('An error occurred while decoding refresh token', { cause: error });
}
}
public validateRefreshToken(token: string) {
try {
const { iss, sub, exp, aud } = this.decodeRefreshToken(token);
if (!iss || !sub || !exp || !aud) return false;
if (iss !== 'steam') return false;
if (exp < Math.floor(Date.now() / 1000)) return false;
if (!aud.includes('renew')) return false;
return true;
} catch (error) {
return false;
}
}
public getRefreshTokenExpiration(token: string) {
try {
const { exp } = this.decodeRefreshToken(token);
return exp * 1000;
} catch (error) {
return 0;
}
}
private inferLoginSessionPlatform(platform: 'web' | 'mobile' | 'desktop'): EAuthTokenPlatformType {
if (platform === 'web') return EAuthTokenPlatformType.WebBrowser;
else if (platform === 'mobile') return EAuthTokenPlatformType.MobileApp;
else if (platform === 'desktop') return EAuthTokenPlatformType.SteamClient;
else throw new Error('Invalid platform');
}
private inferConnectionId(id?: string) {
return `${SteamTokensService.name}:${id || 'localhost'}`;
}
private throttleConnection(connectionId: string, timeoutMs: number) {
connectionId = this.inferConnectionId(connectionId);
this.throttledConnections.set(connectionId, true, timeoutMs);
if | (this.inferConnectionId() !== connectionId) this.proxiesService.throttleProxy(connectionId, timeoutMs); |
}
private async waitConnectionLimitReset(connectionId: string) {
connectionId = this.inferConnectionId(connectionId);
const execute = () => {
if (this.throttledConnections.get(connectionId)) return false;
this.throttleConnection(connectionId, 1000);
return true;
};
if (execute()) return;
return new Promise<void>((resolve) => {
const interval = setInterval(() => {
if (!execute()) return;
clearInterval(interval);
resolve();
}, 1000);
});
}
}
| src/modules/steam-tokens/steam-tokens.service.ts | Sadzurami-steam-sessions-creator-97b7294 | [
{
"filename": "src/modules/proxies/proxies.service.ts",
"retrieved_chunk": " return proxy;\n }\n public getProxiesCount() {\n return this.proxies.size;\n }\n public throttleProxy(proxy: Proxy | string, timeoutMs?: number) {\n const proxyId = this.getProxyId(proxy);\n this.throttledProxies.set(proxyId, true, timeoutMs);\n }\n private async fetchProxy() {",
"score": 37.83799780015547
},
{
"filename": "src/commands/create/create-sessions.service.ts",
"retrieved_chunk": " } catch (error) {\n throw new Error('Failed to create session', { cause: error });\n }\n }\n private async createRefreshToken(account: Account, platform: 'web' | 'mobile' | 'desktop') {\n try {\n return await pRetry(() => this.steamTokensService.createRefreshToken(account, platform), {\n retries: 3,\n minTimeout: 31000,\n maxTimeout: 31000,",
"score": 11.498863028296178
},
{
"filename": "src/app.service.ts",
"retrieved_chunk": " }\n private setProcessTitle(title: string) {\n if (process.title === title) return;\n if (process.platform === 'win32') process.title = title;\n else process.stdout.write(`\\x1b]2;${title}\\x1b\\x5c`);\n }\n public subscribeToShutdown(shutdownFn: () => void): void {\n this.shutdownListener$.subscribe(() => shutdownFn());\n }\n private catchExceptions() {",
"score": 11.248962722748248
},
{
"filename": "src/modules/export-sessions/export-sessions.service.ts",
"retrieved_chunk": " if (!directory || typeof directory !== 'string') throw new Error('Invalid output path');\n if (!path.isAbsolute(directory)) throw new Error('Output path must be absolute');\n try {\n await fs.mkdir(directory, { recursive: true });\n } catch (error) {\n throw new Error('Failed to create output directory', { cause: error });\n }\n this.outputPath = directory;\n }\n public async exportSession(session: Session) {",
"score": 10.56674643957632
},
{
"filename": "src/modules/secrets-import/secrets-import.service.ts",
"retrieved_chunk": " if (!identity_secret) throw new Error('Identity secret is missing');\n this.identitySecret = identity_secret;\n if (!account_name) throw new Error('Account name is missing');\n this.username = account_name;\n }\n}\n@Injectable()\nexport class SecretsImportService {\n private readonly logger = new Logger(SecretsImportService.name);\n private readonly readFilesQueue = new pQueue({ concurrency: 100 });",
"score": 10.5466517801227
}
] | typescript | (this.inferConnectionId() !== connectionId) this.proxiesService.throttleProxy(connectionId, timeoutMs); |
import BookModel from "../models/BookModel";
import Bucket from "../models/Bucket";
import Token from "../lib/GenerateToken";
import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const";
import { TokStatus, Book } from "../common/types";
import {
sendJsonResponse,
parseSimplePostData,
md5,
uuid,
} from "../common/utils";
import filetype from "file-type-cjs";
import fs from "node:fs";
import EPub from "epub";
import os from "node:os";
import path from "node:path";
import crypto from "node:crypto";
import { exec } from "node:child_process";
import http from "node:http";
async function getEpubCoverFromEpubFile_UNIX(
epubFilepath: string
): Promise<[Buffer, string] | null> {
let randomString = crypto.randomBytes(16).toString("hex");
let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`);
fs.mkdirSync(tempDir);
let unzipCMD = `unzip -q ${epubFilepath} -d ${tempDir}`;
let unzipCMDExec = new Promise((resolve, reject) => {
exec(unzipCMD, (err: any, stdout: any, stderr: any) => {
if (err) reject(err);
resolve(stdout);
});
});
try {
await unzipCMDExec;
} catch (err) {
console.error(err);
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return null;
}
let findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`;
let findCMDExec: Promise<string> = new Promise((resolve, reject) => {
exec(findCMD, (err: any, stdout: any, stderr: any) => {
if (err) reject(err);
resolve(stdout);
});
});
let selectedFilePath: string;
try {
selectedFilePath = await findCMDExec;
selectedFilePath = selectedFilePath.trim();
} catch (err) {
console.error(err);
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return null;
}
let ret: [Buffer, string] = [
Buffer.from(fs.readFileSync(selectedFilePath)),
selectedFilePath,
];
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return ret;
}
export default async function (
req: http.IncomingMessage,
res: http.ServerResponse
) {
const BOOK_DB = new BookModel();
const BUCKET = new Bucket();
await BOOK_DB.init();
await BUCKET.init();
try {
if (req.method === "GET") {
try {
let userBooks = await BOOK_DB.getBooks();
userBooks = userBooks.map((e) => {
delete e.path;
return e;
});
sendJsonResponse(res, userBooks, 200);
} catch (error) {
console.error(error);
sendJsonResponse(res, ERROR.internalErr);
}
} else if (req.method === "POST") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let epubBuffer: Buffer;
epubBuffer = await parseSimplePostData(req);
let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6);
let bufferMime = await filetype.fromBuffer(epubBuffer);
if (bufferMime.mime != "application/epub+zip") {
sendJsonResponse(res, ERROR.invalidMimeForResource, 415);
return;
}
if (epubSizeInMB > MAX_EPUB_SIZE_MB) {
| sendJsonResponse(res, ERROR.fileTooLarge, 400); |
return;
}
let randomString = crypto.randomBytes(16).toString("hex");
const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`);
fs.writeFileSync(tempEpubFilePath, epubBuffer);
const epub: any = await new Promise((resolve, reject) => {
const epub = new EPub(tempEpubFilePath);
epub.on("end", () => resolve(epub));
epub.on("error", reject);
epub.parse();
});
let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath);
console.log(epubCoverBuffer);
let epubSignature = md5(epubBuffer.toString("hex"));
let foundBook = await BOOK_DB.getBook("", epubSignature);
if (foundBook) {
sendJsonResponse(
res,
{
...ERROR.resourceExists,
data: {
id: foundBook.id,
},
},
409
);
return;
}
let epubFilePermalink = await BUCKET.pushBufferWithName(
epubBuffer,
`${epubSignature}.epub`
);
let epubCoverPermalink = null;
if (epubCoverBuffer) {
epubCoverPermalink = await BUCKET.pushBufferWithName(
epubCoverBuffer[0],
`${epubSignature}.${epubCoverBuffer[1].split(".").pop()}`
);
}
let epubID = uuid();
let epubEntry: Book = {
id: epubID,
userid: parsedAuthToken.id,
title: epub.metadata?.title ?? epubID.split("-").pop(),
author: epub.metadata?.creator ?? parsedAuthToken.email,
path: epubFilePermalink,
signature: epubSignature,
cover: epubCoverPermalink,
};
const pushed = await BOOK_DB.pushBook(epubEntry);
if (!pushed) {
sendJsonResponse(res, ERROR.internalErr, 500);
return;
}
sendJsonResponse(
res,
{
error: null,
message: `successfully published a book of id ${epubEntry.id}`,
data: {
id: epubEntry.id,
},
},
201
);
} else if (req.method === "DELETE") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let body: Buffer;
body = await parseSimplePostData(req);
let data: any;
try {
data = JSON.parse(body.toString());
} catch {
sendJsonResponse(res, ERROR.invalidJSONData, 400);
return;
}
if (!data.bookid) {
sendJsonResponse(res, ERROR.badRequest, 400);
return;
}
let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id);
if (!bookDeleted) {
sendJsonResponse(res, {
error: "unable-to-delete-book",
message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`,
status: 404
}, 404)
return;
}
sendJsonResponse(res, {
error: null,
message: `successfully deleted book of id ${data.bookid}`,
status: 204,
data: {
id: data.bookid,
}
}, 204)
}
} finally {
await BOOK_DB.close();
}
}
| src/routes/Books.ts | Aadv1k-quillia-52c5b34 | [
{
"filename": "src/common/utils.ts",
"retrieved_chunk": " res.writeHead(code ?? 200, {\n \"Content-type\": \"application/epub+zip\"\n });\n res.write(epubBuffer);\n}\nexport function uuid(): string {\n const nid = nanoid.customAlphabet(\"1234567890abcdef\", 10);\n let id = nid();\n return id;\n}",
"score": 31.156835260745876
},
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " res.on(\"end\", () => resolve(data));\n res.on(\"error\", (error) => reject(error));\n });\n });\n let epubBuffer = Buffer.concat(response);\n sendEpubResponse(res, epubBuffer);\n return;\n } else {\n let userIssues = await ISSUE_DB.getIssues(parsedAuthToken.id);\n if (!userIssues) {",
"score": 30.120481461387076
},
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " sendJsonResponse(res, ERROR.resourceNotExists, 404);\n } else {\n sendJsonResponse(res, userIssues, 200);\n }\n }\n } else if (req.method === \"POST\") {\n if (req.headers?.[\"content-type\"] != \"application/json\") {\n sendJsonResponse(res, ERROR.invalidMimeForResource, 415);\n return;\n }",
"score": 26.94511645056364
},
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " let issueData: Issue;\n try {\n let issuePostData = await parseSimplePostData(req);\n issueData = JSON.parse(issuePostData.toString());\n } catch (error) {\n console.error(error);\n sendJsonResponse(res, ERROR.badRequest, 400);\n return;\n }\n if (!issueData.lenderid || !issueData.bookid) {",
"score": 20.215060400427404
},
{
"filename": "src/routes/Signup.ts",
"retrieved_chunk": " sendJsonResponse(res, ERROR.badRequest, 400);\n return;\n }\n await DB.init();\n let foundUser = await DB.getUser(parsedData.email);\n if (foundUser) {\n sendJsonResponse(res, ERROR.userAlreadyExists, 409)\n return;\n }\n let user: User = {",
"score": 20.09100879078356
}
] | typescript | sendJsonResponse(res, ERROR.fileTooLarge, 400); |
import fs from 'fs/promises';
import inquirer from 'inquirer';
import pQueue from 'p-queue';
import path from 'path';
import { setTimeout as delay } from 'timers/promises';
import { Injectable, Logger } from '@nestjs/common';
import { Secrets as ISecrets } from '../../interfaces/secrets.interface';
class Secrets implements ISecrets {
public readonly username: string;
public readonly sharedSecret: string;
public readonly identitySecret: string;
constructor(secrets: string) {
let parsedSecrets: any;
try {
parsedSecrets = JSON.parse(secrets);
} catch (error) {}
if (typeof parsedSecrets !== 'object' || parsedSecrets === null) throw new Error('Secrets string is invalid');
const { shared_secret, identity_secret, account_name } = parsedSecrets;
if (!shared_secret) throw new Error('Shared secret is missing');
this.sharedSecret = shared_secret;
if (!identity_secret) throw new Error('Identity secret is missing');
this.identitySecret = identity_secret;
if (!account_name) throw new Error('Account name is missing');
this.username = account_name;
}
}
@Injectable()
export class SecretsImportService {
private readonly logger = new Logger(SecretsImportService.name);
private readonly readFilesQueue = new pQueue({ concurrency: 100 });
public async loadSecrets(input: string[] | string) {
if (!input) return [];
if (!Array.isArray(input)) input = [input];
if (input.length === 0) return [];
let secrets: Secrets[] = [];
const errors: string[] = [];
const readResults = await Promise.all(input.map((input) => this.readSecretsFromInput(input)));
for (const result of readResults) {
secrets.push(...result.values);
errors.push(...result.errors);
}
secrets = this.removeDuplicates(secrets);
if (errors.length > 0) {
this.logger.warn(`The following secret sources are invalid:\n${errors.join('\n')}`);
await delay(1000);
const { confirm } = await inquirer.prompt({
type: 'confirm',
name: 'confirm',
message:
secrets.length > 0 ? `Continue with ${secrets.length} valid secrets?` : 'Continue without any secrets?',
default: false,
});
if (!confirm) throw new Error('Aborted by user');
}
return secrets;
}
private removeDuplicates(secrets: Secrets[]) {
const map = new Map<string, Secrets>();
for (const secret of secrets) map.set(secret.username, secret);
return [...map.values()];
}
private async readSecretsFromInput(input: string) {
const inputType = await this.inferInputType(input);
if (inputType === 'file') return this.readSecretsFromFile(input);
if (inputType === 'string') return { values: [], errors: [input] };
if (inputType === 'directory') return { values: [], errors: [input] };
}
private async readSecretsFromFile(filePath: string) {
const result: { values: Secrets[]; errors: string[] } = { values: [], errors: [] };
try {
const fileExtension = path.extname(filePath);
// mafile
if (fileExtension.toLowerCase() === '.mafile') {
const readResult = await this.readSecretsFromMaFile(filePath);
result.values.push(...readResult.values);
if (readResult.errors.length > 0) result.errors.push(filePath);
return result;
}
// asf db
if (fileExtension === '.db') {
const readResult = await this.readSecretsFromAsfDbFile(filePath);
result.values.push(...readResult.values);
if (readResult.errors.length > 0) result.errors.push(filePath);
return result;
}
throw new Error('Unsupported file format');
} catch (error) {
result.errors.push(filePath);
}
return result;
}
private async readSecretsFromMaFile(filePath: string) {
const result: { values: Secrets[]; errors: string[] } = { values: [], errors: [] };
try {
let content = await this.readFilesQueue.add(() => fs.readFile(filePath, 'utf-8'));
content = content.trim().replace(/},\s*}/g, '}}');
const secrets = | new Secrets(content); |
result.values.push(secrets);
} catch (error) {
result.errors.push(filePath);
}
return result;
}
private async readSecretsFromAsfDbFile(filePath: string) {
const result: { values: Secrets[]; errors: string[] } = { values: [], errors: [] };
try {
let content = await this.readFilesQueue.add(() => fs.readFile(filePath, 'utf-8'));
const parsedContent = JSON.parse(content)['_MobileAuthenticator'];
parsedContent['account_name'] = path.basename(filePath, path.extname(filePath));
content = JSON.stringify(parsedContent);
const secrets = new Secrets(content);
result.values.push(secrets);
} catch (error) {
result.errors.push(filePath);
}
return result;
}
private async inferInputType(input: string) {
if (typeof input !== 'string') throw new Error(`Invalid input type: '${typeof input}'`);
try {
const stats = await fs.stat(input);
if (stats.isFile()) return 'file';
if (stats.isDirectory()) return 'directory';
} catch (error) {
return 'string';
}
}
}
| src/modules/secrets-import/secrets-import.service.ts | Sadzurami-steam-sessions-creator-97b7294 | [
{
"filename": "src/modules/accounts-import/accounts-import.service.ts",
"retrieved_chunk": " const result: { values: Account[]; errors: string[] } = { values: [], errors: [] };\n try {\n let content = await this.readFilesQueue.add(() => fs.readFile(filePath, 'utf-8'));\n content = content.trim();\n if (content.length === 0) throw new Error('Empty file');\n // session file\n if (filePath.endsWith('.steamsession')) {\n const readResults = this.readAccountFromSessionFile(content);\n result.values.push(...readResults.values);\n if (readResults.errors.length > 0) result.errors.push(filePath);",
"score": 68.04246090959683
},
{
"filename": "src/modules/sessions-import/sessions-import.service.ts",
"retrieved_chunk": " private async readSessionsFromInput(input: string) {\n const inputType = await this.inferInputType(input);\n if (inputType === 'file') return this.readSessionFromFile(input);\n if (inputType === 'string') return { values: [], errors: [input] };\n if (inputType === 'directory') return { values: [], errors: [input] };\n }\n private async readSessionFromFile(filePath: string) {\n const result: { values: Session[]; errors: string[] } = { values: [], errors: [] };\n try {\n let content = await this.readFilesQueue.add(() => fs.readFile(filePath, 'utf-8'));",
"score": 52.93897648613027
},
{
"filename": "src/modules/accounts-import/accounts-import.service.ts",
"retrieved_chunk": " return result;\n }\n // asf json\n if (filePath.endsWith('.json') && content.includes('\"SteamLogin\"')) {\n const readResults = this.readAccountFromAsfJson(content);\n result.values.push(...readResults.values);\n if (readResults.errors.length > 0) result.errors.push(filePath);\n return result;\n }\n // plain text",
"score": 42.85099977959259
},
{
"filename": "src/modules/sessions-import/sessions-import.service.ts",
"retrieved_chunk": " content = JSON.parse(content);\n if (content == null || typeof content !== 'object' || Array.isArray(content)) {\n throw new Error('Invalid session file');\n }\n const session = Object.fromEntries(\n Object.entries(content).map(([key, value]) => [key[0].toLowerCase() + key.slice(1), value]),\n ) as unknown as Session;\n result.values.push(session);\n } catch (error) {\n result.errors.push(filePath);",
"score": 38.89843250731829
},
{
"filename": "src/modules/accounts-import/accounts-import.service.ts",
"retrieved_chunk": " if (content.includes(':')) {\n const lines = content\n .split(/\\s+|\\r?\\n/)\n .map((l) => l.trim())\n .filter((l) => l.length > 0);\n if (lines.length === 0) throw new Error('Empty file');\n for (const line of lines) {\n const readResults = this.readAccountFromString(line);\n result.values.push(...readResults.values);\n if (readResults.errors.length > 0) result.errors.push(line);",
"score": 33.46529708228791
}
] | typescript | new Secrets(content); |
import BookModel from "../models/BookModel";
import Bucket from "../models/Bucket";
import Token from "../lib/GenerateToken";
import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const";
import { TokStatus, Book } from "../common/types";
import {
sendJsonResponse,
parseSimplePostData,
md5,
uuid,
} from "../common/utils";
import filetype from "file-type-cjs";
import fs from "node:fs";
import EPub from "epub";
import os from "node:os";
import path from "node:path";
import crypto from "node:crypto";
import { exec } from "node:child_process";
import http from "node:http";
async function getEpubCoverFromEpubFile_UNIX(
epubFilepath: string
): Promise<[Buffer, string] | null> {
let randomString = crypto.randomBytes(16).toString("hex");
let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`);
fs.mkdirSync(tempDir);
let unzipCMD = `unzip -q ${epubFilepath} -d ${tempDir}`;
let unzipCMDExec = new Promise((resolve, reject) => {
exec(unzipCMD, (err: any, stdout: any, stderr: any) => {
if (err) reject(err);
resolve(stdout);
});
});
try {
await unzipCMDExec;
} catch (err) {
console.error(err);
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return null;
}
let findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`;
let findCMDExec: Promise<string> = new Promise((resolve, reject) => {
exec(findCMD, (err: any, stdout: any, stderr: any) => {
if (err) reject(err);
resolve(stdout);
});
});
let selectedFilePath: string;
try {
selectedFilePath = await findCMDExec;
selectedFilePath = selectedFilePath.trim();
} catch (err) {
console.error(err);
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return null;
}
let ret: [Buffer, string] = [
Buffer.from(fs.readFileSync(selectedFilePath)),
selectedFilePath,
];
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return ret;
}
export default async function (
req: http.IncomingMessage,
res: http.ServerResponse
) {
const BOOK_DB = new BookModel();
const BUCKET = new Bucket();
await BOOK_DB.init();
await BUCKET.init();
try {
if (req.method === "GET") {
try {
let userBooks = await BOOK_DB.getBooks();
userBooks = userBooks.map((e) => {
delete e.path;
return e;
});
sendJsonResponse(res, userBooks, 200);
} catch (error) {
console.error(error);
sendJsonResponse(res, ERROR.internalErr);
}
} else if (req.method === "POST") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let epubBuffer: Buffer;
epubBuffer = await parseSimplePostData(req);
let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6);
let bufferMime = await filetype.fromBuffer(epubBuffer);
if (bufferMime.mime != "application/epub+zip") {
sendJsonResponse(res, ERROR.invalidMimeForResource, 415);
return;
}
if (epubSizeInMB > MAX_EPUB_SIZE_MB) {
sendJsonResponse(res, ERROR.fileTooLarge, 400);
return;
}
let randomString = crypto.randomBytes(16).toString("hex");
const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`);
fs.writeFileSync(tempEpubFilePath, epubBuffer);
const epub: any = await new Promise((resolve, reject) => {
const epub = new EPub(tempEpubFilePath);
epub.on("end", () => resolve(epub));
epub.on("error", reject);
epub.parse();
});
let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath);
console.log(epubCoverBuffer);
let epubSignature = md5(epubBuffer.toString("hex"));
let foundBook = await BOOK_DB.getBook("", epubSignature);
if (foundBook) {
sendJsonResponse(
res,
{
...ERROR.resourceExists,
data: {
id: foundBook.id,
},
},
409
);
return;
}
let epubFilePermalink = await BUCKET.pushBufferWithName(
epubBuffer,
`${epubSignature}.epub`
);
let epubCoverPermalink = null;
if (epubCoverBuffer) {
epubCoverPermalink = await BUCKET.pushBufferWithName(
epubCoverBuffer[0],
`${epubSignature}.${epubCoverBuffer[1].split(".").pop()}`
);
}
let epubID = uuid();
let epubEntry: Book = {
id: epubID,
userid: parsedAuthToken.id,
title: epub.metadata?.title ?? epubID.split("-").pop(),
author: epub.metadata?.creator ?? parsedAuthToken.email,
path: epubFilePermalink,
signature: epubSignature,
cover: epubCoverPermalink,
};
const pushed = await BOOK_DB.pushBook(epubEntry);
if (!pushed) {
sendJsonResponse(res, ERROR.internalErr, 500);
return;
}
sendJsonResponse(
res,
{
error: null,
message: `successfully published a book of id ${epubEntry.id}`,
data: {
id: epubEntry.id,
},
},
201
);
} else if (req.method === "DELETE") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let body: Buffer;
body = await parseSimplePostData(req);
let data: any;
try {
data = JSON.parse(body.toString());
} catch {
| sendJsonResponse(res, ERROR.invalidJSONData, 400); |
return;
}
if (!data.bookid) {
sendJsonResponse(res, ERROR.badRequest, 400);
return;
}
let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id);
if (!bookDeleted) {
sendJsonResponse(res, {
error: "unable-to-delete-book",
message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`,
status: 404
}, 404)
return;
}
sendJsonResponse(res, {
error: null,
message: `successfully deleted book of id ${data.bookid}`,
status: 204,
data: {
id: data.bookid,
}
}, 204)
}
} finally {
await BOOK_DB.close();
}
}
| src/routes/Books.ts | Aadv1k-quillia-52c5b34 | [
{
"filename": "src/routes/Login.ts",
"retrieved_chunk": " const DB = new UserModel();\n let data: any = await parseSimplePostData(req);\n data = data.toString();\n if (req.method !== \"POST\") {\n sendJsonResponse(res, ERROR.methodNotAllowed, 405);\n return;\n }\n let parsedData: User;\n try {\n parsedData = JSON.parse(data);",
"score": 31.121681761180955
},
{
"filename": "src/lib/GenerateToken.ts",
"retrieved_chunk": " }\n // assumes that the token is valid\n UNSAFE_parse(token: string): object {\n const [ _a, body, _b ] = token.split(\".\");\n const parsedBody = Buffer.from(body, \"base64\").toString(\"utf-8\");\n const parsedJson = JSON.parse(parsedBody);\n return parsedJson;\n } \n private sign(data: string): string {\n return createHmac(JWT.HASH, JWT.SECRET)",
"score": 28.923075206532417
},
{
"filename": "src/routes/Signup.ts",
"retrieved_chunk": ") {\n const DB = new UserModel();\n if (req.method !== \"POST\") {\n sendJsonResponse(res, ERROR.methodNotAllowed, 405);\n return;\n }\n let data: any = await parseSimplePostData(req);\n data = data.toString();\n let parsedData: User;\n try {",
"score": 27.168260794396133
},
{
"filename": "src/lib/GenerateToken.ts",
"retrieved_chunk": " }\n let b64Head = Buffer.from(JSON.stringify(head)).toString(\"base64\").replace(/=/g, \"\");\n let b64Body = Buffer.from(JSON.stringify(body)).toString(\"base64\").replace(/=/g, \"\");\n let signature = this.sign(`${b64Head}.${b64Body}`);\n return `${b64Head}.${b64Body}.${signature}`\n }\n verify(token: string): TokStatus {\n let [head, body, signature] = token.split('.');\n if (!head || !body || !signature) {\n return TokStatus.INVALID;",
"score": 27.123186808413156
},
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " return;\n }\n await ISSUE_DB.init();\n await BOOK_DB.init();\n await USER_DB.init();\n const parsedAuthToken: any = token.UNSAFE_parse(authToken);\n if (req.method === \"GET\") {\n let URLParams = req.url.split(\"/\").slice(3);\n let requestedBook = URLParams?.[0];\n if (requestedBook) {",
"score": 23.30433205076515
}
] | typescript | sendJsonResponse(res, ERROR.invalidJSONData, 400); |
import fs from 'fs/promises';
import inquirer from 'inquirer';
import pQueue from 'p-queue';
import { setTimeout as delay } from 'timers/promises';
import { Injectable, Logger } from '@nestjs/common';
import { Session } from '../../interfaces/session.interface';
@Injectable()
export class SessionsImportService {
private readonly logger = new Logger(SessionsImportService.name);
private readonly readFilesQueue = new pQueue({ concurrency: 100 });
public async loadSessions(input: string[] | string) {
if (!input) return [];
if (!Array.isArray(input)) input = [input];
if (input.length === 0) return [];
let sessions: Session[] = [];
const errors: string[] = [];
const readResults = await Promise.all(input.map((input) => this.readSessionsFromInput(input)));
for (const result of readResults) {
sessions.push(...result.values);
errors.push(...result.errors);
}
sessions = this.removeDuplicates(sessions);
if (errors.length > 0 && sessions.length > 0) {
this.logger.warn(`The following session sources are invalid:\n${errors.join('\n')}`);
await delay(1000);
const { confirm } = await inquirer.prompt({
type: 'confirm',
name: 'confirm',
message: 'Continue with the valid sessions?',
default: false,
});
if (!confirm) throw new Error('Aborted by user');
}
return sessions;
}
private removeDuplicates(sessions: Session[]) {
const map = new Map<string, Session>();
| for (const session of sessions) map.set(session.username, session); |
return [...map.values()];
}
private async readSessionsFromInput(input: string) {
const inputType = await this.inferInputType(input);
if (inputType === 'file') return this.readSessionFromFile(input);
if (inputType === 'string') return { values: [], errors: [input] };
if (inputType === 'directory') return { values: [], errors: [input] };
}
private async readSessionFromFile(filePath: string) {
const result: { values: Session[]; errors: string[] } = { values: [], errors: [] };
try {
let content = await this.readFilesQueue.add(() => fs.readFile(filePath, 'utf-8'));
content = JSON.parse(content);
if (content == null || typeof content !== 'object' || Array.isArray(content)) {
throw new Error('Invalid session file');
}
const session = Object.fromEntries(
Object.entries(content).map(([key, value]) => [key[0].toLowerCase() + key.slice(1), value]),
) as unknown as Session;
result.values.push(session);
} catch (error) {
result.errors.push(filePath);
}
return result;
}
private async inferInputType(input: string) {
if (typeof input !== 'string') throw new Error(`Invalid input type: '${typeof input}'`);
try {
const stats = await fs.stat(input);
if (stats.isFile()) return 'file';
if (stats.isDirectory()) return 'directory';
} catch (error) {
return 'string';
}
}
}
| src/modules/sessions-import/sessions-import.service.ts | Sadzurami-steam-sessions-creator-97b7294 | [
{
"filename": "src/modules/secrets-import/secrets-import.service.ts",
"retrieved_chunk": " default: false,\n });\n if (!confirm) throw new Error('Aborted by user');\n }\n return secrets;\n }\n private removeDuplicates(secrets: Secrets[]) {\n const map = new Map<string, Secrets>();\n for (const secret of secrets) map.set(secret.username, secret);\n return [...map.values()];",
"score": 47.324408903073596
},
{
"filename": "src/modules/proxies-import/proxies-import.service.ts",
"retrieved_chunk": " if (!confirm) throw new Error('Aborted by user');\n }\n return proxies;\n }\n private removeDuplicates(proxies: Proxy[]) {\n const map = new Map<string, Proxy>();\n for (const proxy of proxies) map.set(proxy.toString(), proxy);\n return [...map.values()];\n }\n private async readProxyFromInput(input: string) {",
"score": 38.835863271539424
},
{
"filename": "src/modules/accounts-import/accounts-import.service.ts",
"retrieved_chunk": " if (errors.length > 0 && accounts.length > 0) {\n this.logger.warn(`The following account sources are invalid:\\n${errors.join('\\n')}`);\n await delay(1000);\n const { confirm } = await inquirer.prompt({\n type: 'confirm',\n name: 'confirm',\n message: 'Continue with the valid accounts?',\n default: false,\n });\n if (!confirm) throw new Error('Aborted by user');",
"score": 38.166736177778176
},
{
"filename": "src/commands/validate/validate-sessions.service.ts",
"retrieved_chunk": " const valid: Session[] = [];\n const invalid: Session[] = [];\n for (const session of sessions) {\n const { valid: isValid, errors, expires } = await this.validateSession(session);\n if (isValid) {\n valid.push(session);\n this.logger.log(\n `Valid: ${session.username}, days: ${Math.floor((expires - Date.now()) / (24 * 60 * 60 * 1000))}`,\n );\n } else {",
"score": 31.963483733985733
},
{
"filename": "src/commands/validate/validate-sessions.service.ts",
"retrieved_chunk": " invalid.push(session);\n this.logger.warn(`Invalid: ${session.username}, errors: ${errors.join(', ')}`);\n }\n }\n if (invalid.length > 0) {\n this.logger.warn(`Invalid sessions:\\n${invalid.map((session) => session.username).join('\\n')}`);\n }\n await delay(1000);\n }\n private async validateSession(session: Session) {",
"score": 27.574366183896167
}
] | typescript | for (const session of sessions) map.set(session.username, session); |
import BookModel from "../models/BookModel";
import Bucket from "../models/Bucket";
import Token from "../lib/GenerateToken";
import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const";
import { TokStatus, Book } from "../common/types";
import {
sendJsonResponse,
parseSimplePostData,
md5,
uuid,
} from "../common/utils";
import filetype from "file-type-cjs";
import fs from "node:fs";
import EPub from "epub";
import os from "node:os";
import path from "node:path";
import crypto from "node:crypto";
import { exec } from "node:child_process";
import http from "node:http";
async function getEpubCoverFromEpubFile_UNIX(
epubFilepath: string
): Promise<[Buffer, string] | null> {
let randomString = crypto.randomBytes(16).toString("hex");
let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`);
fs.mkdirSync(tempDir);
let unzipCMD = `unzip -q ${epubFilepath} -d ${tempDir}`;
let unzipCMDExec = new Promise((resolve, reject) => {
exec(unzipCMD, (err: any, stdout: any, stderr: any) => {
if (err) reject(err);
resolve(stdout);
});
});
try {
await unzipCMDExec;
} catch (err) {
console.error(err);
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return null;
}
let findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`;
let findCMDExec: Promise<string> = new Promise((resolve, reject) => {
exec(findCMD, (err: any, stdout: any, stderr: any) => {
if (err) reject(err);
resolve(stdout);
});
});
let selectedFilePath: string;
try {
selectedFilePath = await findCMDExec;
selectedFilePath = selectedFilePath.trim();
} catch (err) {
console.error(err);
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return null;
}
let ret: [Buffer, string] = [
Buffer.from(fs.readFileSync(selectedFilePath)),
selectedFilePath,
];
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return ret;
}
export default async function (
req: http.IncomingMessage,
res: http.ServerResponse
) {
const BOOK_DB = new BookModel();
const BUCKET = new Bucket();
await BOOK_DB.init();
await BUCKET.init();
try {
if (req.method === "GET") {
try {
let userBooks = await BOOK_DB.getBooks();
userBooks = userBooks.map((e) => {
delete e.path;
return e;
});
sendJsonResponse(res, userBooks, 200);
} catch (error) {
console.error(error);
sendJsonResponse(res, ERROR.internalErr);
}
} else if (req.method === "POST") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let epubBuffer: Buffer;
epubBuffer = await parseSimplePostData(req);
let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6);
let bufferMime = await filetype.fromBuffer(epubBuffer);
if (bufferMime.mime != "application/epub+zip") {
sendJsonResponse(res, ERROR.invalidMimeForResource, 415);
return;
}
if (epubSizeInMB > MAX_EPUB_SIZE_MB) {
sendJsonResponse(res, ERROR.fileTooLarge, 400);
return;
}
let randomString = crypto.randomBytes(16).toString("hex");
const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`);
fs.writeFileSync(tempEpubFilePath, epubBuffer);
const epub: any = await new Promise((resolve, reject) => {
const epub = new EPub(tempEpubFilePath);
epub.on("end", () => resolve(epub));
epub.on("error", reject);
epub.parse();
});
let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath);
console.log(epubCoverBuffer);
let epubSignature = md5(epubBuffer.toString("hex"));
let foundBook = await BOOK_DB.getBook("", epubSignature);
if (foundBook) {
sendJsonResponse(
res,
{
...ERROR.resourceExists,
data: {
id: foundBook.id,
},
},
409
);
return;
}
let epubFilePermalink = await BUCKET.pushBufferWithName(
epubBuffer,
`${epubSignature}.epub`
);
let epubCoverPermalink = null;
if (epubCoverBuffer) {
epubCoverPermalink = await BUCKET.pushBufferWithName(
epubCoverBuffer[0],
`${epubSignature}.${epubCoverBuffer[1].split(".").pop()}`
);
}
let epubID = uuid();
let | epubEntry: Book = { |
id: epubID,
userid: parsedAuthToken.id,
title: epub.metadata?.title ?? epubID.split("-").pop(),
author: epub.metadata?.creator ?? parsedAuthToken.email,
path: epubFilePermalink,
signature: epubSignature,
cover: epubCoverPermalink,
};
const pushed = await BOOK_DB.pushBook(epubEntry);
if (!pushed) {
sendJsonResponse(res, ERROR.internalErr, 500);
return;
}
sendJsonResponse(
res,
{
error: null,
message: `successfully published a book of id ${epubEntry.id}`,
data: {
id: epubEntry.id,
},
},
201
);
} else if (req.method === "DELETE") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let body: Buffer;
body = await parseSimplePostData(req);
let data: any;
try {
data = JSON.parse(body.toString());
} catch {
sendJsonResponse(res, ERROR.invalidJSONData, 400);
return;
}
if (!data.bookid) {
sendJsonResponse(res, ERROR.badRequest, 400);
return;
}
let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id);
if (!bookDeleted) {
sendJsonResponse(res, {
error: "unable-to-delete-book",
message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`,
status: 404
}, 404)
return;
}
sendJsonResponse(res, {
error: null,
message: `successfully deleted book of id ${data.bookid}`,
status: 204,
data: {
id: data.bookid,
}
}, 204)
}
} finally {
await BOOK_DB.close();
}
}
| src/routes/Books.ts | Aadv1k-quillia-52c5b34 | [
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " return;\n }\n await ISSUE_DB.init();\n await BOOK_DB.init();\n await USER_DB.init();\n const parsedAuthToken: any = token.UNSAFE_parse(authToken);\n if (req.method === \"GET\") {\n let URLParams = req.url.split(\"/\").slice(3);\n let requestedBook = URLParams?.[0];\n if (requestedBook) {",
"score": 10.611082591382184
},
{
"filename": "src/models/BookModel.ts",
"retrieved_chunk": " }\n }\n async bookExists(bookid: string): Promise<boolean> {\n const result = await this.client.query(\"SELECT EXISTS (SELECT 1 FROM books WHERE id = $1)\", [bookid])\n return result.rows[0].exists\n } \n async getBooks(): Promise<Array<Book> | null> {\n try {\n let response = await this.client.query(\"SELECT * FROM books\");\n return response.rows;",
"score": 9.92325141216121
},
{
"filename": "src/routes/Signup.ts",
"retrieved_chunk": " id: uuid(),\n email: parsedData.email,\n password: md5(parsedData.password),\n } \n const token = new Token();\n let pushed = await DB.pushUser(user)\n const { password, ...tokenBody} = user;\n let accessToken = token.generate(tokenBody);\n if (pushed !== null) {\n sendJsonResponse(res, {",
"score": 8.730631410089146
},
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " id: foundIssue.id,\n bookid: foundIssue.bookid,\n },\n },\n 409\n );\n return;\n }\n let issueid = uuid();\n let issueEntry: Issue = {",
"score": 8.50937352393995
},
{
"filename": "src/common/utils.ts",
"retrieved_chunk": "}\nexport function sendPublicFile(res: ServerResponse, filepath: string) {\n let resourcePath = path.join(__dirname, \"../../public\", filepath)\n if (!existsSync(resourcePath)) {\n // we hope to handle the 404 state on the frontend\n resourcePath = path.join(__dirname, \"../../public\", \"index.html\")\n }\n let ext = resourcePath.split('.').pop();\n res.writeHead(200, { \"Content-type\": MIME[ext] });\n res.write(readFileSync(resourcePath))",
"score": 8.495642271984218
}
] | typescript | epubEntry: Book = { |
import BookModel from "../models/BookModel";
import Bucket from "../models/Bucket";
import Token from "../lib/GenerateToken";
import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const";
import { TokStatus, Book } from "../common/types";
import {
sendJsonResponse,
parseSimplePostData,
md5,
uuid,
} from "../common/utils";
import filetype from "file-type-cjs";
import fs from "node:fs";
import EPub from "epub";
import os from "node:os";
import path from "node:path";
import crypto from "node:crypto";
import { exec } from "node:child_process";
import http from "node:http";
async function getEpubCoverFromEpubFile_UNIX(
epubFilepath: string
): Promise<[Buffer, string] | null> {
let randomString = crypto.randomBytes(16).toString("hex");
let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`);
fs.mkdirSync(tempDir);
let unzipCMD = `unzip -q ${epubFilepath} -d ${tempDir}`;
let unzipCMDExec = new Promise((resolve, reject) => {
exec(unzipCMD, (err: any, stdout: any, stderr: any) => {
if (err) reject(err);
resolve(stdout);
});
});
try {
await unzipCMDExec;
} catch (err) {
console.error(err);
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return null;
}
let findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`;
let findCMDExec: Promise<string> = new Promise((resolve, reject) => {
exec(findCMD, (err: any, stdout: any, stderr: any) => {
if (err) reject(err);
resolve(stdout);
});
});
let selectedFilePath: string;
try {
selectedFilePath = await findCMDExec;
selectedFilePath = selectedFilePath.trim();
} catch (err) {
console.error(err);
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return null;
}
let ret: [Buffer, string] = [
Buffer.from(fs.readFileSync(selectedFilePath)),
selectedFilePath,
];
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return ret;
}
export default async function (
req: http.IncomingMessage,
res: http.ServerResponse
) {
const BOOK_DB = new BookModel();
const BUCKET = new Bucket();
await BOOK_DB.init();
await BUCKET.init();
try {
if (req.method === "GET") {
try {
let userBooks = await BOOK_DB.getBooks();
userBooks = userBooks.map((e) => {
delete e.path;
return e;
});
sendJsonResponse(res, userBooks, 200);
} catch (error) {
console.error(error);
sendJsonResponse(res, ERROR.internalErr);
}
} else if (req.method === "POST") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let epubBuffer: Buffer;
epubBuffer = await parseSimplePostData(req);
let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6);
let bufferMime = await filetype.fromBuffer(epubBuffer);
if (bufferMime.mime != "application/epub+zip") {
sendJsonResponse(res, ERROR.invalidMimeForResource, 415);
return;
}
if (epubSizeInMB > MAX_EPUB_SIZE_MB) {
sendJsonResponse(res, ERROR.fileTooLarge, 400);
return;
}
let randomString = crypto.randomBytes(16).toString("hex");
const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`);
fs.writeFileSync(tempEpubFilePath, epubBuffer);
const epub: any = await new Promise((resolve, reject) => {
const epub = new EPub(tempEpubFilePath);
epub.on("end", () => resolve(epub));
epub.on("error", reject);
epub.parse();
});
let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath);
console.log(epubCoverBuffer);
let epubSignature = md5(epubBuffer.toString("hex"));
let foundBook = await BOOK_DB.getBook("", epubSignature);
if (foundBook) {
sendJsonResponse(
res,
{
...ERROR.resourceExists,
data: {
id: foundBook.id,
},
},
409
);
return;
}
| let epubFilePermalink = await BUCKET.pushBufferWithName(
epubBuffer,
`${epubSignature}.epub`
); |
let epubCoverPermalink = null;
if (epubCoverBuffer) {
epubCoverPermalink = await BUCKET.pushBufferWithName(
epubCoverBuffer[0],
`${epubSignature}.${epubCoverBuffer[1].split(".").pop()}`
);
}
let epubID = uuid();
let epubEntry: Book = {
id: epubID,
userid: parsedAuthToken.id,
title: epub.metadata?.title ?? epubID.split("-").pop(),
author: epub.metadata?.creator ?? parsedAuthToken.email,
path: epubFilePermalink,
signature: epubSignature,
cover: epubCoverPermalink,
};
const pushed = await BOOK_DB.pushBook(epubEntry);
if (!pushed) {
sendJsonResponse(res, ERROR.internalErr, 500);
return;
}
sendJsonResponse(
res,
{
error: null,
message: `successfully published a book of id ${epubEntry.id}`,
data: {
id: epubEntry.id,
},
},
201
);
} else if (req.method === "DELETE") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let body: Buffer;
body = await parseSimplePostData(req);
let data: any;
try {
data = JSON.parse(body.toString());
} catch {
sendJsonResponse(res, ERROR.invalidJSONData, 400);
return;
}
if (!data.bookid) {
sendJsonResponse(res, ERROR.badRequest, 400);
return;
}
let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id);
if (!bookDeleted) {
sendJsonResponse(res, {
error: "unable-to-delete-book",
message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`,
status: 404
}, 404)
return;
}
sendJsonResponse(res, {
error: null,
message: `successfully deleted book of id ${data.bookid}`,
status: 204,
data: {
id: data.bookid,
}
}, 204)
}
} finally {
await BOOK_DB.close();
}
}
| src/routes/Books.ts | Aadv1k-quillia-52c5b34 | [
{
"filename": "src/common/utils.ts",
"retrieved_chunk": " res.writeHead(code ?? 200, {\n \"Content-type\": \"application/epub+zip\"\n });\n res.write(epubBuffer);\n}\nexport function uuid(): string {\n const nid = nanoid.customAlphabet(\"1234567890abcdef\", 10);\n let id = nid();\n return id;\n}",
"score": 8.87515259408984
},
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " res.on(\"end\", () => resolve(data));\n res.on(\"error\", (error) => reject(error));\n });\n });\n let epubBuffer = Buffer.concat(response);\n sendEpubResponse(res, epubBuffer);\n return;\n } else {\n let userIssues = await ISSUE_DB.getIssues(parsedAuthToken.id);\n if (!userIssues) {",
"score": 6.622191097038099
},
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " id: foundIssue.id,\n bookid: foundIssue.bookid,\n },\n },\n 409\n );\n return;\n }\n let issueid = uuid();\n let issueEntry: Issue = {",
"score": 5.695677848459726
},
{
"filename": "src/routes/Signup.ts",
"retrieved_chunk": " sendJsonResponse(res, ERROR.badRequest, 400);\n return;\n }\n await DB.init();\n let foundUser = await DB.getUser(parsedData.email);\n if (foundUser) {\n sendJsonResponse(res, ERROR.userAlreadyExists, 409)\n return;\n }\n let user: User = {",
"score": 5.417022789321514
},
{
"filename": "src/models/Bucket.ts",
"retrieved_chunk": " this.isLocal = false;\n if (!CLOUDINARY_CONF.API_SECRET) {\n this.isLocal = true;\n this.bucketPath = path.join(__dirname, \"../BUCKET\");\n } else {\n cloudinary.config({\n cloud_name: CLOUDINARY_CONF.CLOUD_NAME,\n api_key: CLOUDINARY_CONF.API_KEY,\n api_secret: CLOUDINARY_CONF.API_SECRET\n })",
"score": 4.3634116294937435
}
] | typescript | let epubFilePermalink = await BUCKET.pushBufferWithName(
epubBuffer,
`${epubSignature}.epub`
); |
import BookModel from "../models/BookModel";
import Bucket from "../models/Bucket";
import Token from "../lib/GenerateToken";
import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const";
import { TokStatus, Book } from "../common/types";
import {
sendJsonResponse,
parseSimplePostData,
md5,
uuid,
} from "../common/utils";
import filetype from "file-type-cjs";
import fs from "node:fs";
import EPub from "epub";
import os from "node:os";
import path from "node:path";
import crypto from "node:crypto";
import { exec } from "node:child_process";
import http from "node:http";
async function getEpubCoverFromEpubFile_UNIX(
epubFilepath: string
): Promise<[Buffer, string] | null> {
let randomString = crypto.randomBytes(16).toString("hex");
let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`);
fs.mkdirSync(tempDir);
let unzipCMD = `unzip -q ${epubFilepath} -d ${tempDir}`;
let unzipCMDExec = new Promise((resolve, reject) => {
exec(unzipCMD, (err: any, stdout: any, stderr: any) => {
if (err) reject(err);
resolve(stdout);
});
});
try {
await unzipCMDExec;
} catch (err) {
console.error(err);
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return null;
}
let findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`;
let findCMDExec: Promise<string> = new Promise((resolve, reject) => {
exec(findCMD, (err: any, stdout: any, stderr: any) => {
if (err) reject(err);
resolve(stdout);
});
});
let selectedFilePath: string;
try {
selectedFilePath = await findCMDExec;
selectedFilePath = selectedFilePath.trim();
} catch (err) {
console.error(err);
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return null;
}
let ret: [Buffer, string] = [
Buffer.from(fs.readFileSync(selectedFilePath)),
selectedFilePath,
];
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return ret;
}
export default async function (
req: http.IncomingMessage,
res: http.ServerResponse
) {
const BOOK_DB = new BookModel();
const BUCKET = new Bucket();
await BOOK_DB.init();
await BUCKET.init();
try {
if (req.method === "GET") {
try {
let userBooks = await BOOK_DB.getBooks();
userBooks = userBooks.map((e) => {
delete e.path;
return e;
});
sendJsonResponse(res, userBooks, 200);
} catch (error) {
console.error(error);
sendJsonResponse(res, ERROR.internalErr);
}
} else if (req.method === "POST") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let epubBuffer: Buffer;
epubBuffer = await parseSimplePostData(req);
let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6);
let bufferMime = await filetype.fromBuffer(epubBuffer);
if (bufferMime.mime != "application/epub+zip") {
sendJsonResponse(res, ERROR.invalidMimeForResource, 415);
return;
}
if (epubSizeInMB > MAX_EPUB_SIZE_MB) {
sendJsonResponse(res, ERROR.fileTooLarge, 400);
return;
}
let randomString = crypto.randomBytes(16).toString("hex");
const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`);
fs.writeFileSync(tempEpubFilePath, epubBuffer);
const epub: any = await new Promise((resolve, reject) => {
const epub = new EPub(tempEpubFilePath);
epub.on("end", () => resolve(epub));
epub.on("error", reject);
epub.parse();
});
let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath);
console.log(epubCoverBuffer);
let epubSignature = md5(epubBuffer.toString("hex"));
let foundBook = await BOOK_DB.getBook("", epubSignature);
if (foundBook) {
sendJsonResponse(
res,
{
...ERROR.resourceExists,
data: {
id: foundBook.id,
},
},
409
);
return;
}
let epubFilePermalink = await BUCKET.pushBufferWithName(
epubBuffer,
`${epubSignature}.epub`
);
let epubCoverPermalink = null;
if (epubCoverBuffer) {
epubCoverPermalink = await BUCKET.pushBufferWithName(
epubCoverBuffer[0],
`${epubSignature}.${epubCoverBuffer[1].split(".").pop()}`
);
}
let epubID = uuid();
let epubEntry: Book = {
id: epubID,
userid: parsedAuthToken.id,
title: epub.metadata?.title ?? epubID.split("-").pop(),
author: epub.metadata?.creator ?? parsedAuthToken.email,
path: epubFilePermalink,
signature: epubSignature,
cover: epubCoverPermalink,
};
const pushed = await BOOK_DB.pushBook(epubEntry);
if (!pushed) {
sendJsonResponse(res, ERROR.internalErr, 500);
return;
}
sendJsonResponse(
res,
{
error: null,
message: `successfully published a book of id ${epubEntry.id}`,
data: {
id: epubEntry.id,
},
},
201
);
} else if (req.method === "DELETE") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let body: Buffer;
body = await parseSimplePostData(req);
let data: any;
try {
data = JSON.parse(body.toString());
} catch {
sendJsonResponse(res, ERROR.invalidJSONData, 400);
return;
}
if (!data.bookid) {
sendJsonResponse(res, ERROR.badRequest, 400);
return;
}
let bookDeleted = | await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id); |
if (!bookDeleted) {
sendJsonResponse(res, {
error: "unable-to-delete-book",
message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`,
status: 404
}, 404)
return;
}
sendJsonResponse(res, {
error: null,
message: `successfully deleted book of id ${data.bookid}`,
status: 204,
data: {
id: data.bookid,
}
}, 204)
}
} finally {
await BOOK_DB.close();
}
}
| src/routes/Books.ts | Aadv1k-quillia-52c5b34 | [
{
"filename": "src/routes/Signup.ts",
"retrieved_chunk": " parsedData = JSON.parse(data === \"\" ? '{}' : data);\n } catch {\n sendJsonResponse(res, ERROR.invalidJSONData, 400)\n return;\n }\n if (!parsedData.email || !parsedData.password) {\n sendJsonResponse(res, ERROR.badRequest, 400);\n return;\n }\n if (!isEmailValid(parsedData.email)) {",
"score": 30.244712688333397
},
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " sendJsonResponse(res, ERROR.badRequest, 400);\n return;\n }\n let foundLender = await USER_DB.getUserByID(issueData.lenderid);\n let foundBook = await BOOK_DB.getBook(issueData.bookid);\n if (!foundLender || !foundBook) {\n sendJsonResponse(res, ERROR.resourceNotExists, 404);\n return;\n }\n let foundIssue = await ISSUE_DB.getIssue(",
"score": 26.137453421732815
},
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " let issueData: Issue;\n try {\n let issuePostData = await parseSimplePostData(req);\n issueData = JSON.parse(issuePostData.toString());\n } catch (error) {\n console.error(error);\n sendJsonResponse(res, ERROR.badRequest, 400);\n return;\n }\n if (!issueData.lenderid || !issueData.bookid) {",
"score": 22.22385333631416
},
{
"filename": "src/routes/Signup.ts",
"retrieved_chunk": " sendJsonResponse(res, ERROR.badRequest, 400);\n return;\n }\n await DB.init();\n let foundUser = await DB.getUser(parsedData.email);\n if (foundUser) {\n sendJsonResponse(res, ERROR.userAlreadyExists, 409)\n return;\n }\n let user: User = {",
"score": 21.267638012885943
},
{
"filename": "src/routes/Login.ts",
"retrieved_chunk": " } catch(error) {\n sendJsonResponse(res, ERROR.invalidJSONData, 400)\n return;\n }\n await DB.init();\n const foundUser: User = await DB.getUser(parsedData.email);\n await DB.close();\n if (!foundUser) {\n sendJsonResponse(res, ERROR.userNotFound, 404);\n return;",
"score": 20.887161803859126
}
] | typescript | await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id); |
import IssueModel from "../models/IssueModel";
import BookModel from "../models/BookModel";
import UserModel from "../models/UserModel";
import Token from "../lib/GenerateToken";
import { ERROR } from "../common/const";
import { TokStatus, Issue } from "../common/types";
import {
sendJsonResponse,
sendEpubResponse,
parseSimplePostData,
uuid,
getBufferFromRawURL,
} from "../common/utils";
import http from "node:http";
import https from "node:https";
export default async function (
req: http.IncomingMessage,
res: http.ServerResponse
) {
const ISSUE_DB = new IssueModel();
const BOOK_DB = new BookModel();
const USER_DB = new UserModel();
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop()?.trim();
try {
if (req.method === "OPTIONS") {
sendJsonResponse(res, {}, 200);
return;
}
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
await ISSUE_DB.init();
await BOOK_DB.init();
await USER_DB.init();
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
if (req.method === "GET") {
let URLParams = req.url.split("/").slice(3);
let requestedBook = URLParams?.[0];
if (requestedBook) {
let | targetBook = await BOOK_DB.getBook(requestedBook); |
if (!targetBook) {
sendJsonResponse(res, ERROR.resourceNotExists, 404);
return;
}
let epubResourcePath = targetBook.path;
const response: Array<Buffer> = await new Promise((resolve, reject) => {
https.get(epubResourcePath, (res) => {
let data: Array<Buffer> = [];
res.on("data", (d: Buffer) => data.push(d));
res.on("end", () => resolve(data));
res.on("error", (error) => reject(error));
});
});
let epubBuffer = Buffer.concat(response);
sendEpubResponse(res, epubBuffer);
return;
} else {
let userIssues = await ISSUE_DB.getIssues(parsedAuthToken.id);
if (!userIssues) {
sendJsonResponse(res, ERROR.resourceNotExists, 404);
} else {
sendJsonResponse(res, userIssues, 200);
}
}
} else if (req.method === "POST") {
if (req.headers?.["content-type"] != "application/json") {
sendJsonResponse(res, ERROR.invalidMimeForResource, 415);
return;
}
let issueData: Issue;
try {
let issuePostData = await parseSimplePostData(req);
issueData = JSON.parse(issuePostData.toString());
} catch (error) {
console.error(error);
sendJsonResponse(res, ERROR.badRequest, 400);
return;
}
if (!issueData.lenderid || !issueData.bookid) {
sendJsonResponse(res, ERROR.badRequest, 400);
return;
}
let foundLender = await USER_DB.getUserByID(issueData.lenderid);
let foundBook = await BOOK_DB.getBook(issueData.bookid);
if (!foundLender || !foundBook) {
sendJsonResponse(res, ERROR.resourceNotExists, 404);
return;
}
let foundIssue = await ISSUE_DB.getIssue(
foundLender.id,
foundBook.id,
parsedAuthToken.id
);
if (foundIssue) {
sendJsonResponse(
res,
{
...ERROR.resourceExists,
data: {
id: foundIssue.id,
bookid: foundIssue.bookid,
},
},
409
);
return;
}
let issueid = uuid();
let issueEntry: Issue = {
id: issueid,
borrowerid: parsedAuthToken.id,
lenderid: foundLender.id,
bookid: foundBook.id,
};
const pushed = await ISSUE_DB.pushIssue(issueEntry);
if (!pushed) {
sendJsonResponse(res, ERROR.internalErr, 500);
return;
}
sendJsonResponse(
res,
{
error: null,
message: `successfully created a new issue of id ${issueEntry.id}`,
data: {
id: pushed.id,
borrower: pushed.borrowerid,
lender: pushed.lenderid,
book: foundBook.title,
},
},
201
);
}
} finally {
await ISSUE_DB.close();
await BOOK_DB.close();
await USER_DB.close();
}
}
| src/routes/Issue.ts | Aadv1k-quillia-52c5b34 | [
{
"filename": "src/routes/Books.ts",
"retrieved_chunk": " return ret;\n}\nexport default async function (\n req: http.IncomingMessage,\n res: http.ServerResponse\n) {\n const BOOK_DB = new BookModel();\n const BUCKET = new Bucket();\n await BOOK_DB.init();\n await BUCKET.init();",
"score": 26.54914608657049
},
{
"filename": "src/routes/Books.ts",
"retrieved_chunk": " }\n const parsedAuthToken: any = token.UNSAFE_parse(authToken);\n let body: Buffer;\n body = await parseSimplePostData(req);\n let data: any;\n try {\n data = JSON.parse(body.toString());\n } catch {\n sendJsonResponse(res, ERROR.invalidJSONData, 400);\n return;",
"score": 23.89993984731167
},
{
"filename": "src/routes/Books.ts",
"retrieved_chunk": " try {\n if (req.method === \"GET\") {\n try {\n let userBooks = await BOOK_DB.getBooks();\n userBooks = userBooks.map((e) => {\n delete e.path;\n return e;\n });\n sendJsonResponse(res, userBooks, 200);\n } catch (error) {",
"score": 22.099598841645385
},
{
"filename": "src/routes/Books.ts",
"retrieved_chunk": " const token = new Token();\n const tokenStatus: TokStatus = token.verify(authToken);\n if (\n tokenStatus === TokStatus.INVALID ||\n tokenStatus === TokStatus.INVALID_SIG\n ) {\n sendJsonResponse(res, ERROR.unauthorized, 401);\n return;\n }\n const parsedAuthToken: any = token.UNSAFE_parse(authToken);",
"score": 19.329600258004437
},
{
"filename": "src/routes/Books.ts",
"retrieved_chunk": " id: epubEntry.id,\n },\n },\n 201\n );\n } else if (req.method === \"DELETE\") {\n const authorization = req.headers?.authorization;\n const authToken = authorization?.split(\" \")?.pop();\n if (!authorization || !authToken) {\n sendJsonResponse(res, ERROR.unauthorized, 401);",
"score": 18.0470921090905
}
] | typescript | targetBook = await BOOK_DB.getBook(requestedBook); |
import BookModel from "../models/BookModel";
import Bucket from "../models/Bucket";
import Token from "../lib/GenerateToken";
import { ERROR, MAX_EPUB_SIZE_MB } from "../common/const";
import { TokStatus, Book } from "../common/types";
import {
sendJsonResponse,
parseSimplePostData,
md5,
uuid,
} from "../common/utils";
import filetype from "file-type-cjs";
import fs from "node:fs";
import EPub from "epub";
import os from "node:os";
import path from "node:path";
import crypto from "node:crypto";
import { exec } from "node:child_process";
import http from "node:http";
async function getEpubCoverFromEpubFile_UNIX(
epubFilepath: string
): Promise<[Buffer, string] | null> {
let randomString = crypto.randomBytes(16).toString("hex");
let tempDir = path.join(os.tmpdir(), `tmp-${randomString}`);
fs.mkdirSync(tempDir);
let unzipCMD = `unzip -q ${epubFilepath} -d ${tempDir}`;
let unzipCMDExec = new Promise((resolve, reject) => {
exec(unzipCMD, (err: any, stdout: any, stderr: any) => {
if (err) reject(err);
resolve(stdout);
});
});
try {
await unzipCMDExec;
} catch (err) {
console.error(err);
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return null;
}
let findCMD = `find ${tempDir} -type f \\( -iname \\*.jpeg -o -iname \\*.jpg -o -iname \\*.png \\) | grep -Ei 'cover\\.|index-1_1'`;
let findCMDExec: Promise<string> = new Promise((resolve, reject) => {
exec(findCMD, (err: any, stdout: any, stderr: any) => {
if (err) reject(err);
resolve(stdout);
});
});
let selectedFilePath: string;
try {
selectedFilePath = await findCMDExec;
selectedFilePath = selectedFilePath.trim();
} catch (err) {
console.error(err);
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return null;
}
let ret: [Buffer, string] = [
Buffer.from(fs.readFileSync(selectedFilePath)),
selectedFilePath,
];
fs.rmSync(tempDir, { recursive: true }); // we r good boys!
return ret;
}
export default async function (
req: http.IncomingMessage,
res: http.ServerResponse
) {
const BOOK_DB = new BookModel();
const BUCKET = new Bucket();
await BOOK_DB.init();
await BUCKET.init();
try {
if (req.method === "GET") {
try {
let userBooks = await BOOK_DB.getBooks();
userBooks = userBooks.map((e) => {
delete e.path;
return e;
});
sendJsonResponse(res, userBooks, 200);
} catch (error) {
console.error(error);
sendJsonResponse(res, ERROR.internalErr);
}
} else if (req.method === "POST") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let epubBuffer: Buffer;
epubBuffer = await parseSimplePostData(req);
let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6);
let bufferMime = await filetype.fromBuffer(epubBuffer);
if (bufferMime.mime != "application/epub+zip") {
sendJsonResponse(res, ERROR.invalidMimeForResource, 415);
return;
}
if (epubSizeInMB > MAX_EPUB_SIZE_MB) {
sendJsonResponse(res, ERROR.fileTooLarge, 400);
return;
}
let randomString = crypto.randomBytes(16).toString("hex");
const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`);
fs.writeFileSync(tempEpubFilePath, epubBuffer);
const epub: any = await new Promise((resolve, reject) => {
const epub = new EPub(tempEpubFilePath);
epub.on("end", () => resolve(epub));
epub.on("error", reject);
epub.parse();
});
let epubCoverBuffer = await getEpubCoverFromEpubFile_UNIX(tempEpubFilePath);
console.log(epubCoverBuffer);
let epubSignature = md5(epubBuffer.toString("hex"));
let foundBook = await BOOK_DB.getBook("", epubSignature);
if (foundBook) {
sendJsonResponse(
res,
{
...ERROR.resourceExists,
data: {
id: foundBook.id,
},
},
409
);
return;
}
let epubFilePermalink = await BUCKET.pushBufferWithName(
epubBuffer,
`${epubSignature}.epub`
);
let epubCoverPermalink = null;
if (epubCoverBuffer) {
epubCoverPermalink = await BUCKET.pushBufferWithName(
epubCoverBuffer[0],
`${epubSignature}.${epubCoverBuffer[1].split(".").pop()}`
);
}
let epubID = uuid();
let epubEntry: Book = {
id: epubID,
userid: parsedAuthToken.id,
title: epub.metadata?.title ?? epubID.split("-").pop(),
author: epub.metadata?.creator ?? parsedAuthToken.email,
path: epubFilePermalink,
signature: epubSignature,
cover: epubCoverPermalink,
};
| const pushed = await BOOK_DB.pushBook(epubEntry); |
if (!pushed) {
sendJsonResponse(res, ERROR.internalErr, 500);
return;
}
sendJsonResponse(
res,
{
error: null,
message: `successfully published a book of id ${epubEntry.id}`,
data: {
id: epubEntry.id,
},
},
201
);
} else if (req.method === "DELETE") {
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop();
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
let body: Buffer;
body = await parseSimplePostData(req);
let data: any;
try {
data = JSON.parse(body.toString());
} catch {
sendJsonResponse(res, ERROR.invalidJSONData, 400);
return;
}
if (!data.bookid) {
sendJsonResponse(res, ERROR.badRequest, 400);
return;
}
let bookDeleted = await BOOK_DB.deleteBook(data.bookid, parsedAuthToken.id);
if (!bookDeleted) {
sendJsonResponse(res, {
error: "unable-to-delete-book",
message: `was unable to delete book ${data.bookid}, perhaps the id was invalid?`,
status: 404
}, 404)
return;
}
sendJsonResponse(res, {
error: null,
message: `successfully deleted book of id ${data.bookid}`,
status: 204,
data: {
id: data.bookid,
}
}, 204)
}
} finally {
await BOOK_DB.close();
}
}
| src/routes/Books.ts | Aadv1k-quillia-52c5b34 | [
{
"filename": "src/common/types.ts",
"retrieved_chunk": "}\nexport interface Book {\n id: string,\n userid: string,\n title: string,\n author: string,\n path: string,\n signature: string\n cover?: string,\n}",
"score": 29.141797630248952
},
{
"filename": "src/models/BookModel.ts",
"retrieved_chunk": " } catch (error) {\n console.error(error);\n return;\n }\n }\n async pushBook(book: Book): Promise<Book | null> {\n try {\n await this.client.query(`\n INSERT INTO books (id, userid, author, title, path, cover, signature) \n VALUES ($1, $2, $3, $4, $5, $6, $7)`, ",
"score": 27.037008247420122
},
{
"filename": "src/models/BookModel.ts",
"retrieved_chunk": " [book.id, book.userid, book.author, book.title, book.path, book?.cover ?? \"\", book.signature]\n )\n return book;\n } catch (error) {\n console.error(error); \n return null;\n }\n }\n async deleteBook(bookid: string, userid?: string) {\n try {",
"score": 21.724444086329136
},
{
"filename": "src/models/BookModel.ts",
"retrieved_chunk": " userid VARCHAR(255) UNIQUE NOT NULL,\n title VARCHAR(255) NOT NULL,\n author VARCHAR(255) NOT NULL,\n signature VARCHAR(255) NOT NULL,\n path VARCHAR(255) NOT NULL,\n cover VARCHAR(255) NOT NULL\n )\n `);\n } catch (error) {\n throw error;",
"score": 17.39863405486049
},
{
"filename": "src/routes/Issue.ts",
"retrieved_chunk": " id: issueid,\n borrowerid: parsedAuthToken.id,\n lenderid: foundLender.id,\n bookid: foundBook.id,\n };\n const pushed = await ISSUE_DB.pushIssue(issueEntry);\n if (!pushed) {\n sendJsonResponse(res, ERROR.internalErr, 500);\n return;\n }",
"score": 16.26327409019853
}
] | typescript | const pushed = await BOOK_DB.pushBook(epubEntry); |
import IssueModel from "../models/IssueModel";
import BookModel from "../models/BookModel";
import UserModel from "../models/UserModel";
import Token from "../lib/GenerateToken";
import { ERROR } from "../common/const";
import { TokStatus, Issue } from "../common/types";
import {
sendJsonResponse,
sendEpubResponse,
parseSimplePostData,
uuid,
getBufferFromRawURL,
} from "../common/utils";
import http from "node:http";
import https from "node:https";
export default async function (
req: http.IncomingMessage,
res: http.ServerResponse
) {
const ISSUE_DB = new IssueModel();
const BOOK_DB = new BookModel();
const USER_DB = new UserModel();
const authorization = req.headers?.authorization;
const authToken = authorization?.split(" ")?.pop()?.trim();
try {
if (req.method === "OPTIONS") {
sendJsonResponse(res, {}, 200);
return;
}
if (!authorization || !authToken) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
const token = new Token();
const tokenStatus: TokStatus = token.verify(authToken);
if (
tokenStatus === TokStatus.INVALID ||
tokenStatus === TokStatus.INVALID_SIG
) {
sendJsonResponse(res, ERROR.unauthorized, 401);
return;
}
await ISSUE_DB.init();
await BOOK_DB.init();
await USER_DB.init();
const parsedAuthToken: any = token.UNSAFE_parse(authToken);
if (req.method === "GET") {
let URLParams = req.url.split("/").slice(3);
let requestedBook = URLParams?.[0];
if (requestedBook) {
let targetBook = await BOOK_DB.getBook(requestedBook);
if (!targetBook) {
sendJsonResponse(res, ERROR.resourceNotExists, 404);
return;
}
let epubResourcePath = targetBook.path;
const response: Array<Buffer> = await new Promise((resolve, reject) => {
https.get(epubResourcePath, (res) => {
let data: Array<Buffer> = [];
res.on("data", (d: Buffer) => data.push(d));
res.on("end", () => resolve(data));
res.on("error", (error) => reject(error));
});
});
let epubBuffer = Buffer.concat(response);
sendEpubResponse(res, epubBuffer);
return;
} else {
| let userIssues = await ISSUE_DB.getIssues(parsedAuthToken.id); |
if (!userIssues) {
sendJsonResponse(res, ERROR.resourceNotExists, 404);
} else {
sendJsonResponse(res, userIssues, 200);
}
}
} else if (req.method === "POST") {
if (req.headers?.["content-type"] != "application/json") {
sendJsonResponse(res, ERROR.invalidMimeForResource, 415);
return;
}
let issueData: Issue;
try {
let issuePostData = await parseSimplePostData(req);
issueData = JSON.parse(issuePostData.toString());
} catch (error) {
console.error(error);
sendJsonResponse(res, ERROR.badRequest, 400);
return;
}
if (!issueData.lenderid || !issueData.bookid) {
sendJsonResponse(res, ERROR.badRequest, 400);
return;
}
let foundLender = await USER_DB.getUserByID(issueData.lenderid);
let foundBook = await BOOK_DB.getBook(issueData.bookid);
if (!foundLender || !foundBook) {
sendJsonResponse(res, ERROR.resourceNotExists, 404);
return;
}
let foundIssue = await ISSUE_DB.getIssue(
foundLender.id,
foundBook.id,
parsedAuthToken.id
);
if (foundIssue) {
sendJsonResponse(
res,
{
...ERROR.resourceExists,
data: {
id: foundIssue.id,
bookid: foundIssue.bookid,
},
},
409
);
return;
}
let issueid = uuid();
let issueEntry: Issue = {
id: issueid,
borrowerid: parsedAuthToken.id,
lenderid: foundLender.id,
bookid: foundBook.id,
};
const pushed = await ISSUE_DB.pushIssue(issueEntry);
if (!pushed) {
sendJsonResponse(res, ERROR.internalErr, 500);
return;
}
sendJsonResponse(
res,
{
error: null,
message: `successfully created a new issue of id ${issueEntry.id}`,
data: {
id: pushed.id,
borrower: pushed.borrowerid,
lender: pushed.lenderid,
book: foundBook.title,
},
},
201
);
}
} finally {
await ISSUE_DB.close();
await BOOK_DB.close();
await USER_DB.close();
}
}
| src/routes/Issue.ts | Aadv1k-quillia-52c5b34 | [
{
"filename": "src/common/utils.ts",
"retrieved_chunk": "export async function getBufferFromRawURL(resourceUrl: string): Promise<Buffer | null> {\n let url = new URL(resourceUrl);\n try {\n let buffArr: Buffer[] = await new Promise((resolve, reject) => {\n let func = url.protocol === \"https:\" ? https : http;\n func.get(url, (res) => {\n let data: Buffer[] = [];\n res.on(\"data\", (d: Buffer) => data.push(d))\n res.on(\"error\", reject)\n res.on(\"end\", () => resolve(data))",
"score": 51.62034885746632
},
{
"filename": "src/common/utils.ts",
"retrieved_chunk": " resolve([fields, files]);\n })\n })\n}\nexport function parseSimplePostData(req: http.IncomingMessage): Promise<Buffer> {\n return new Promise((resolve, reject) => {\n let data: Buffer[] = [];\n req.on(\"data\", (chunk: Buffer) => data.push(chunk))\n req.on(\"end\", () => { \n const buf = Buffer.concat(data);",
"score": 42.23082390598137
},
{
"filename": "src/routes/Books.ts",
"retrieved_chunk": " return;\n }\n let randomString = crypto.randomBytes(16).toString(\"hex\");\n const tempEpubFilePath = path.join(os.tmpdir(), `tmp-${randomString}.epub`);\n fs.writeFileSync(tempEpubFilePath, epubBuffer);\n const epub: any = await new Promise((resolve, reject) => {\n const epub = new EPub(tempEpubFilePath);\n epub.on(\"end\", () => resolve(epub));\n epub.on(\"error\", reject);\n epub.parse();",
"score": 25.4324721609606
},
{
"filename": "src/common/utils.ts",
"retrieved_chunk": " resolve(buf);\n });\n req.on(\"error\", reject);\n })\n}\nexport function md5(data: string): string {\n return crypto\n .createHash(\"md5\")\n .update(data)\n .digest(\"hex\");",
"score": 23.772206417120344
},
{
"filename": "src/routes/Books.ts",
"retrieved_chunk": " let epubBuffer: Buffer;\n epubBuffer = await parseSimplePostData(req);\n let epubSizeInMB = Math.ceil(epubBuffer.length / 1e6);\n let bufferMime = await filetype.fromBuffer(epubBuffer);\n if (bufferMime.mime != \"application/epub+zip\") {\n sendJsonResponse(res, ERROR.invalidMimeForResource, 415);\n return;\n }\n if (epubSizeInMB > MAX_EPUB_SIZE_MB) {\n sendJsonResponse(res, ERROR.fileTooLarge, 400);",
"score": 19.771757838223508
}
] | typescript | let userIssues = await ISSUE_DB.getIssues(parsedAuthToken.id); |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.