prefix
stringlengths 82
32.6k
| middle
stringlengths 5
470
| suffix
stringlengths 0
81.2k
| file_path
stringlengths 6
168
| repo_name
stringlengths 16
77
| context
listlengths 5
5
| lang
stringclasses 4
values | ground_truth
stringlengths 5
470
|
---|---|---|---|---|---|---|---|
import sourceToCST, {
ConcreteAttributeNode,
ConcreteElementOpeningTagNode,
ConcreteElementSelfClosingTagNode,
ConcreteLiquidDropNode,
ConcreteNode,
ConcreteNodeTypes,
ConcreteTextNode,
} from '../1-source-to-cst';
import { UnknownConcreteNodeTypeError } from '../errors';
import ASTBuilder from './ast-builder';
export type BasicNode<T> = {
type: T;
locStart: number;
locEnd: number;
source: string;
};
export enum NodeTypes {
TextNode = 'TextNode',
LiquidDropNode = 'LiquidDropNode',
ElementNode = 'ElementNode',
AttributeDoubleQuoted = 'AttributeDoubleQuoted',
AttributeSingleQuoted = 'AttributeSingleQuoted',
AttributeUnquoted = 'AttributeUnquoted',
AttributeEmpty = 'AttributeEmpty',
}
export type TextNode = {
value: string;
} & BasicNode<NodeTypes.TextNode>;
export type LiquidDropNode = {
value: string;
} & BasicNode<NodeTypes.LiquidDropNode>;
export type LiquidXNode = TextNode | LiquidDropNode | ElementNode | AttributeNode;
export type ElementNode = {
name: string;
source: string;
attributes: AttributeNode[];
children: LiquidXNode[];
} & BasicNode<NodeTypes.ElementNode>;
export type AttributeNode =
| AttributeDoubleQuoted
| AttributeSingleQuoted
| AttributeUnquoted
| AttributeEmpty;
export type AttributeNodeBase<T> = {
name: TextNode;
value: TextNode | LiquidDropNode;
} & BasicNode<T>;
export type AttributeDoubleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeDoubleQuoted>;
export type AttributeSingleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeSingleQuoted>;
export type AttributeUnquoted = {} & AttributeNodeBase<NodeTypes.AttributeUnquoted>;
export type AttributeEmpty = { name: TextNode } & BasicNode<NodeTypes.AttributeEmpty>;
function toTextNode(node: ConcreteTextNode): TextNode {
return {
type: NodeTypes.TextNode,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
value: node.value,
};
}
function toLiquidDropNode(node: ConcreteLiquidDropNode): LiquidDropNode {
return {
type: NodeTypes.LiquidDropNode,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
value: node.value,
};
}
function toElementNode(
node: ConcreteElementOpeningTagNode | ConcreteElementSelfClosingTagNode,
): ElementNode {
return {
type: NodeTypes.ElementNode,
locStart: node.locStart,
locEnd: node.locEnd,
name: node.name,
source: node.source,
attributes: toAttributes(node.attributes),
children: [],
};
}
function toAttributes(attributes: ConcreteAttributeNode[]) {
return cstToAST(attributes) as AttributeNode[];
}
function toAttributeValue(value: ConcreteTextNode | ConcreteLiquidDropNode) {
return cstToAST([value])[0] as TextNode | LiquidDropNode;
}
function isAttributeNode(node: any): boolean {
return (
node.type === ConcreteNodeTypes.AttributeDoubleQuoted ||
node.type === ConcreteNodeTypes.AttributeSingleQuoted ||
node.type === ConcreteNodeTypes.AttributeUnquoted ||
node.type === ConcreteNodeTypes.AttributeEmpty
);
}
function cstToAST(cst: ConcreteNode[] | ConcreteAttributeNode[]) {
if (cst.length === 0) return [];
const | astBuilder = new ASTBuilder(cst[0].source); |
for (let i = 0; i < cst.length; i += 1) {
const node = cst[i];
const prevNode = cst[i - 1];
// Add whitespaces and linebreaks that went missing after parsing. We don't need to do this
// if the node is an attribute since whitespaces between attributes is not important to preserve.
// In fact it would probably break the rendered output due to unexpected text nodes.
// TODO: This should be handled in the grammar/source-to-cst part instead (if possible).
if (prevNode?.source && !isAttributeNode(node)) {
const diff = node.locStart - prevNode.locEnd;
if (diff > 0) {
astBuilder.push(
toTextNode({
type: ConcreteNodeTypes.TextNode,
locStart: prevNode.locEnd,
locEnd: node.locStart,
source: node.source,
value: prevNode.source.slice(prevNode.locEnd, node.locStart),
}),
);
}
}
switch (node.type) {
case ConcreteNodeTypes.TextNode: {
astBuilder.push(toTextNode(node));
break;
}
case ConcreteNodeTypes.LiquidDropNode: {
astBuilder.push(toLiquidDropNode(node));
break;
}
case ConcreteNodeTypes.ElementOpeningTag: {
astBuilder.open(toElementNode(node));
break;
}
case ConcreteNodeTypes.ElementClosingTag: {
astBuilder.close(node, NodeTypes.ElementNode);
break;
}
case ConcreteNodeTypes.ElementSelfClosingTag: {
astBuilder.open(toElementNode(node));
astBuilder.close(node, NodeTypes.ElementNode);
break;
}
case ConcreteNodeTypes.AttributeDoubleQuoted:
case ConcreteNodeTypes.AttributeSingleQuoted:
case ConcreteNodeTypes.AttributeUnquoted: {
const attributeNode: AttributeDoubleQuoted | AttributeSingleQuoted | AttributeUnquoted = {
type: node.type as unknown as
| NodeTypes.AttributeDoubleQuoted
| NodeTypes.AttributeSingleQuoted
| NodeTypes.AttributeUnquoted,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
name: cstToAST([node.name])[0] as TextNode,
value: toAttributeValue(node.value),
};
astBuilder.push(attributeNode);
break;
}
case ConcreteNodeTypes.AttributeEmpty: {
const attributeNode: AttributeEmpty = {
type: NodeTypes.AttributeEmpty,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
name: cstToAST([node.name])[0] as TextNode,
};
astBuilder.push(attributeNode);
break;
}
default: {
throw new UnknownConcreteNodeTypeError(
'',
(node as any)?.source,
(node as any)?.locStart,
(node as any)?.locEnd,
);
}
}
}
return astBuilder.finish();
}
export default function sourceToAST(source: string): LiquidXNode[] {
const cst = sourceToCST(source);
const ast = cstToAST(cst);
return ast;
}
| src/parser/2-cst-to-ast/index.ts | unshopable-liquidx-a101873 | [
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " case NodeTypes.ElementNode: {\n output += renderElement(node, { withSource, isChildOfElementNode });\n break;\n }\n case NodeTypes.AttributeDoubleQuoted:\n case NodeTypes.AttributeSingleQuoted:\n case NodeTypes.AttributeUnquoted: {\n const name = renderText(node.name);\n let value = null;\n if (node.value.type === NodeTypes.TextNode) {",
"score": 0.827281653881073
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": " | ConcreteAttributeSingleQuoted\n | ConcreteAttributeUnquoted\n | ConcreteAttributeEmpty;\nexport type ConcreteAttributeDoubleQuoted =\n {} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeDoubleQuoted>;\nexport type ConcreteAttributeSingleQuoted =\n {} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeSingleQuoted>;\nexport type ConcreteAttributeUnquoted =\n {} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeUnquoted>;\nexport type ConcreteAttributeEmpty = {",
"score": 0.8130304217338562
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": " attributeName: textNode,\n attributeDoubleQuotedValue: 0,\n attributeSingleQuotedValue: 0,\n attributeUnquotedValue: 0,\n attributeDoubleQuotedTextNode: textNode,\n attributeSingleQuotedTextNode: textNode,\n attributeUnquotedTextNode: textNode,\n };\n const cst = toAST(matchResult, mapping) as ConcreteNode[];\n return cst;",
"score": 0.8017815947532654
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": "export type ConcreteElementSelfClosingTagNode = {\n name: string;\n attributes: ConcreteAttributeNode[];\n} & ConcreteBasicNode<ConcreteNodeTypes.ElementSelfClosingTag>;\nexport type ConcreteAttributeNodeBase<T> = {\n name: ConcreteTextNode;\n value: ConcreteTextNode;\n} & ConcreteBasicNode<T>;\nexport type ConcreteAttributeNode =\n | ConcreteAttributeDoubleQuoted",
"score": 0.7856545448303223
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": " AttributeDoubleQuoted = 'AttributeDoubleQuoted',\n AttributeSingleQuoted = 'AttributeSingleQuoted',\n AttributeUnquoted = 'AttributeUnquoted',\n AttributeEmpty = 'AttributeEmpty',\n}\nexport type ConcreteNode =\n | ConcreteTextNode\n | ConcreteLiquidDropNode\n | ConcreteElementOpeningTagNode\n | ConcreteElementClosingTagNode",
"score": 0.7823784947395325
}
] | typescript | astBuilder = new ASTBuilder(cst[0].source); |
import { type IResult, type Request } from 'mssql';
import type { StoredProcedureParameter, StoredProcedureSchema, ILogger } from '../types';
import { type DatabaseExecutor } from '../executor';
import { convertSqlValueToJsValue } from '../utils';
/**
* A manager for stored procedure metadata.
* Handles the retrieval and caching of stored procedure metadata.
*/
export class StoredProcedureMetadataManager {
/**
* Regular expression to extract MSSQL stored procedure names.
* See https://regex101.com/r/cMsTyT/1 for this regex.
*/
private static readonly storedProcedureNameRegex =
/((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))/i;
/**
* Matches any comments from the Stored Procedure definition.
* See https://regex101.com/r/dxA7n0/1 for this regex.
*/
private static readonly commentRegex = /(?:\s*-{2}.+\s*$)|(?:\/\*([\s\S]*?)\*\/)/gm;
/**
* Matches the parameters from the Stored Procedure definition.
* See https://regex101.com/r/4TaTky/1 for this regex.
*/
private static readonly parameterSectionRegex =
/(?<=(?:CREATE|ALTER)\s+PROCEDURE)\s+((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))(.*?)(?=(?:AS|FOR\s+REPLICATION)[^\w])/is;
/**
* See https://regex101.com/r/iMEaLb/1 for this regex.
* Match the individual parameters in the Parameter Definition.
*/
private static readonly parameterDefinitionRegex = /(@[\w]+)\s+([^\s]+)\s*=\s*([^, ]*),?/gi;
constructor(private readonly _databaseExecutor: DatabaseExecutor) {}
/**
* Parses the stored procedure parameter schema into a StoredProcedureParameter array.
* @param {string} storedProcedureName - The name of the stored procedure to retrieve the parameter schema for.
* @returns A Promise that resolves to the result of the stored procedure execution.
*/
public async getStoredProcedureParameterSchema(
storedProcedureName: string,
logger | : ILogger,
): Promise<IResult<StoredProcedureSchema>> { |
return await this._databaseExecutor.executeQueryRequest(async (request: Request) => {
// Remove square bracket notation if any, and split into schema and name.
const schemaAndName = storedProcedureName.replace(/\[|\]/g, '').split('.');
const result = await request.query<StoredProcedureSchema>(
'SELECT ' +
'PARAMETER_NAME as name, ' +
'DATA_TYPE as type, ' +
'PARAMETER_MODE as mode, ' +
'CHARACTER_MAXIMUM_LENGTH length, ' +
'NUMERIC_PRECISION as precision, ' +
'NUMERIC_SCALE as scale ' +
'FROM INFORMATION_SCHEMA.PARAMETERS ' +
`WHERE SPECIFIC_SCHEMA = '${schemaAndName[0]}' AND SPECIFIC_NAME = '${schemaAndName[1]}';
SELECT OBJECT_DEFINITION(OBJECT_ID('${storedProcedureName}')) AS storedProcedureDefinition;`,
);
const recordSetLength = result.recordsets.length as number;
if (recordSetLength < 1 || recordSetLength > 2) {
throw new Error(
`Could not retrieve stored procedure parameter schema from Database for stored procedure ${storedProcedureName}.`,
);
}
if (recordSetLength !== 2 || result.recordsets[1].length !== 1) {
throw new Error(
`Could not retrieve stored procedure definition from Database for stored procedure ${storedProcedureName}.`,
);
}
return result;
}, logger);
}
/**
* Parses the stored procedure parameter schema into a StoredProcedureParameter array.
* @param {string} storedProcedureName - The name of the stored procedure to parse the parameter schema for.
* @param {IResult<StoredProcedureSchema>} schemaResult - The result of the stored procedure parameter schema query.
* @returns A StoredProcedureParameter array.
*/
public parseStoredProcedureParameters(
storedProcedureName: string,
schemaResult: IResult<StoredProcedureSchema>,
): IterableIterator<StoredProcedureParameter> {
const parameterSchemaMap: Map<string, StoredProcedureParameter> =
schemaResult.recordsets[0].reduce(
(parameterMap: Map<string, StoredProcedureParameter>, item: StoredProcedureParameter) => {
parameterMap.set(item.name, item);
return parameterMap;
},
new Map<string, StoredProcedureParameter>(),
);
const storedProcedureDefinition = schemaResult.recordsets[1][0].storedProcedureDefinition;
if (storedProcedureDefinition == null) {
throw new Error(
`Could not parse stored procedure definition for stored procedure ${storedProcedureName}.`,
);
}
const commentStrippedStoredProcedureDefinition = storedProcedureDefinition.replace(
StoredProcedureMetadataManager.commentRegex,
'',
);
if (commentStrippedStoredProcedureDefinition === '') {
throw new Error(
`Could not parse stored procedure comments from definition for stored procedure ${storedProcedureName}.`,
);
}
const parameterSection = commentStrippedStoredProcedureDefinition.match(
StoredProcedureMetadataManager.parameterSectionRegex,
);
if (parameterSection === null || parameterSection.length !== 9) {
throw new Error(
`Could not parse stored procedure parameters from definition for stored procedure ${storedProcedureName}.`,
);
}
const parameterDefinition = parameterSection[8];
let parameterDefinitionMatch;
while (
(parameterDefinitionMatch =
StoredProcedureMetadataManager.parameterDefinitionRegex.exec(parameterDefinition)) !== null
) {
const name = parameterDefinitionMatch[1];
const type = parameterDefinitionMatch[2];
const defaultValue = parameterDefinitionMatch[3];
const parameter = parameterSchemaMap.get(name);
if (parameter !== undefined) {
parameter.defaultValue = convertSqlValueToJsValue(defaultValue, type);
}
}
return parameterSchemaMap.values();
}
}
| src/lib/stored-procedure/stored-procedure-metadata-manager.ts | Falven-mssql-data-source-bca6621 | [
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " * procedure results to the correct schema field names.\n * @param {ILogger} logger - The logger to use for logging.\n * @returns A Promise that resolves to the result of the stored procedure execution.\n */\n public async executeStoredProcedure<T>(\n storedProcedureName: string,\n input: InputParameters,\n request: Request,\n logger: ILogger,\n info?: GraphQLResolveInfo,",
"score": 0.9420072436332703
},
{
"filename": "src/lib/datasource/mssql-datasource.ts",
"retrieved_chunk": " * @template T - This type parameter represents the type of the value returned by the resolver procedure.\n * @param {string} storedProcedureName - The name of the stored procedure to execute.\n * @param {StoredProcedureInput} input - The input parameters for the stored procedure.\n * @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored\n * procedure results to the correct schema field names.\n * @returns A Promise that resolves to the result of the stored procedure execution.\n */\n public async executeStoredProcedureQuery<T>(\n storedProcedureName: string,\n input: InputParameters,",
"score": 0.9348859190940857
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " }\n /**\n * Prepares the stored procedure request.\n * @param {IterableIterator<StoredProcedureParameter>} storedProcedureParameters - The stored procedure parameters.\n * @param {StoredProcedureInput} input - The input object.\n * @param {Request} request - The request object.\n * @returns A prepared request object.\n */\n private prepareStoredProcedureRequest(\n storedProcedureParameters: IterableIterator<StoredProcedureParameter>,",
"score": 0.919461190700531
},
{
"filename": "src/lib/datasource/mssql-datasource.ts",
"retrieved_chunk": " /**\n * Executes a stored procedure for a Mutation operation with the provided input parameters, and returns the result.\n * @template T - This type parameter represents the type of the value returned by the resolver procedure.\n * @param {string} storedProcedureName - The name of the stored procedure to execute.\n * @param {StoredProcedureInput} input - The input parameters for the stored procedure.\n * @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored\n * procedure results to the correct schema field names.\n * @returns A Promise that resolves to the result of the stored procedure execution.\n */\n public async executeStoredProcedureMutation<T>(",
"score": 0.9153305292129517
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " private readonly _storedProcedureMetadataManager: StoredProcedureMetadataManager,\n ) {}\n /**\n * Executes a stored procedure with the provided input parameters, and returns the result.\n * @template TVal - The type of records in the result set.\n * @template TRet - The type of the result object to be returned.\n * @param {string} storedProcedureName - The name of the stored procedure to execute.\n * @param {StoredProcedureInput} input - The input parameters for the stored procedure.\n * @param {Request} request - The request to execute the stored procedure.\n * @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored",
"score": 0.911805272102356
}
] | typescript | : ILogger,
): Promise<IResult<StoredProcedureSchema>> { |
import { type IResult, type Request } from 'mssql';
import type { StoredProcedureParameter, StoredProcedureSchema, ILogger } from '../types';
import { type DatabaseExecutor } from '../executor';
import { convertSqlValueToJsValue } from '../utils';
/**
* A manager for stored procedure metadata.
* Handles the retrieval and caching of stored procedure metadata.
*/
export class StoredProcedureMetadataManager {
/**
* Regular expression to extract MSSQL stored procedure names.
* See https://regex101.com/r/cMsTyT/1 for this regex.
*/
private static readonly storedProcedureNameRegex =
/((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))/i;
/**
* Matches any comments from the Stored Procedure definition.
* See https://regex101.com/r/dxA7n0/1 for this regex.
*/
private static readonly commentRegex = /(?:\s*-{2}.+\s*$)|(?:\/\*([\s\S]*?)\*\/)/gm;
/**
* Matches the parameters from the Stored Procedure definition.
* See https://regex101.com/r/4TaTky/1 for this regex.
*/
private static readonly parameterSectionRegex =
/(?<=(?:CREATE|ALTER)\s+PROCEDURE)\s+((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))(.*?)(?=(?:AS|FOR\s+REPLICATION)[^\w])/is;
/**
* See https://regex101.com/r/iMEaLb/1 for this regex.
* Match the individual parameters in the Parameter Definition.
*/
private static readonly parameterDefinitionRegex = /(@[\w]+)\s+([^\s]+)\s*=\s*([^, ]*),?/gi;
constructor(private readonly _databaseExecutor: DatabaseExecutor) {}
/**
* Parses the stored procedure parameter schema into a StoredProcedureParameter array.
* @param {string} storedProcedureName - The name of the stored procedure to retrieve the parameter schema for.
* @returns A Promise that resolves to the result of the stored procedure execution.
*/
public async getStoredProcedureParameterSchema(
storedProcedureName: string,
logger: ILogger,
): Promise<IResult<StoredProcedureSchema>> {
return await this._databaseExecutor.executeQueryRequest(async (request: Request) => {
// Remove square bracket notation if any, and split into schema and name.
const schemaAndName = storedProcedureName.replace(/\[|\]/g, '').split('.');
const result = await request.query<StoredProcedureSchema>(
'SELECT ' +
'PARAMETER_NAME as name, ' +
'DATA_TYPE as type, ' +
'PARAMETER_MODE as mode, ' +
'CHARACTER_MAXIMUM_LENGTH length, ' +
'NUMERIC_PRECISION as precision, ' +
'NUMERIC_SCALE as scale ' +
'FROM INFORMATION_SCHEMA.PARAMETERS ' +
`WHERE SPECIFIC_SCHEMA = '${schemaAndName[0]}' AND SPECIFIC_NAME = '${schemaAndName[1]}';
SELECT OBJECT_DEFINITION(OBJECT_ID('${storedProcedureName}')) AS storedProcedureDefinition;`,
);
const recordSetLength = result.recordsets.length as number;
if (recordSetLength < 1 || recordSetLength > 2) {
throw new Error(
`Could not retrieve stored procedure parameter schema from Database for stored procedure ${storedProcedureName}.`,
);
}
if (recordSetLength !== 2 || result.recordsets[1].length !== 1) {
throw new Error(
`Could not retrieve stored procedure definition from Database for stored procedure ${storedProcedureName}.`,
);
}
return result;
}, logger);
}
/**
* Parses the stored procedure parameter schema into a StoredProcedureParameter array.
* @param {string} storedProcedureName - The name of the stored procedure to parse the parameter schema for.
* @param {IResult<StoredProcedureSchema>} schemaResult - The result of the stored procedure parameter schema query.
* @returns A StoredProcedureParameter array.
*/
public parseStoredProcedureParameters(
storedProcedureName: string,
schemaResult: IResult<StoredProcedureSchema>,
| ): IterableIterator<StoredProcedureParameter> { |
const parameterSchemaMap: Map<string, StoredProcedureParameter> =
schemaResult.recordsets[0].reduce(
(parameterMap: Map<string, StoredProcedureParameter>, item: StoredProcedureParameter) => {
parameterMap.set(item.name, item);
return parameterMap;
},
new Map<string, StoredProcedureParameter>(),
);
const storedProcedureDefinition = schemaResult.recordsets[1][0].storedProcedureDefinition;
if (storedProcedureDefinition == null) {
throw new Error(
`Could not parse stored procedure definition for stored procedure ${storedProcedureName}.`,
);
}
const commentStrippedStoredProcedureDefinition = storedProcedureDefinition.replace(
StoredProcedureMetadataManager.commentRegex,
'',
);
if (commentStrippedStoredProcedureDefinition === '') {
throw new Error(
`Could not parse stored procedure comments from definition for stored procedure ${storedProcedureName}.`,
);
}
const parameterSection = commentStrippedStoredProcedureDefinition.match(
StoredProcedureMetadataManager.parameterSectionRegex,
);
if (parameterSection === null || parameterSection.length !== 9) {
throw new Error(
`Could not parse stored procedure parameters from definition for stored procedure ${storedProcedureName}.`,
);
}
const parameterDefinition = parameterSection[8];
let parameterDefinitionMatch;
while (
(parameterDefinitionMatch =
StoredProcedureMetadataManager.parameterDefinitionRegex.exec(parameterDefinition)) !== null
) {
const name = parameterDefinitionMatch[1];
const type = parameterDefinitionMatch[2];
const defaultValue = parameterDefinitionMatch[3];
const parameter = parameterSchemaMap.get(name);
if (parameter !== undefined) {
parameter.defaultValue = convertSqlValueToJsValue(defaultValue, type);
}
}
return parameterSchemaMap.values();
}
}
| src/lib/stored-procedure/stored-procedure-metadata-manager.ts | Falven-mssql-data-source-bca6621 | [
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " }\n /**\n * Prepares the stored procedure request.\n * @param {IterableIterator<StoredProcedureParameter>} storedProcedureParameters - The stored procedure parameters.\n * @param {StoredProcedureInput} input - The input object.\n * @param {Request} request - The request object.\n * @returns A prepared request object.\n */\n private prepareStoredProcedureRequest(\n storedProcedureParameters: IterableIterator<StoredProcedureParameter>,",
"score": 0.9315205812454224
},
{
"filename": "src/lib/datasource/mssql-datasource.ts",
"retrieved_chunk": " * @template T - This type parameter represents the type of the value returned by the resolver procedure.\n * @param {string} storedProcedureName - The name of the stored procedure to execute.\n * @param {StoredProcedureInput} input - The input parameters for the stored procedure.\n * @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored\n * procedure results to the correct schema field names.\n * @returns A Promise that resolves to the result of the stored procedure execution.\n */\n public async executeStoredProcedureQuery<T>(\n storedProcedureName: string,\n input: InputParameters,",
"score": 0.920464038848877
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " * procedure results to the correct schema field names.\n * @param {ILogger} logger - The logger to use for logging.\n * @returns A Promise that resolves to the result of the stored procedure execution.\n */\n public async executeStoredProcedure<T>(\n storedProcedureName: string,\n input: InputParameters,\n request: Request,\n logger: ILogger,\n info?: GraphQLResolveInfo,",
"score": 0.9146190881729126
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " private readonly _storedProcedureMetadataManager: StoredProcedureMetadataManager,\n ) {}\n /**\n * Executes a stored procedure with the provided input parameters, and returns the result.\n * @template TVal - The type of records in the result set.\n * @template TRet - The type of the result object to be returned.\n * @param {string} storedProcedureName - The name of the stored procedure to execute.\n * @param {StoredProcedureInput} input - The input parameters for the stored procedure.\n * @param {Request} request - The request to execute the stored procedure.\n * @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored",
"score": 0.9036562442779541
},
{
"filename": "src/lib/datasource/mssql-datasource.ts",
"retrieved_chunk": " /**\n * Executes a stored procedure for a Mutation operation with the provided input parameters, and returns the result.\n * @template T - This type parameter represents the type of the value returned by the resolver procedure.\n * @param {string} storedProcedureName - The name of the stored procedure to execute.\n * @param {StoredProcedureInput} input - The input parameters for the stored procedure.\n * @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored\n * procedure results to the correct schema field names.\n * @returns A Promise that resolves to the result of the stored procedure execution.\n */\n public async executeStoredProcedureMutation<T>(",
"score": 0.8955270051956177
}
] | typescript | ): IterableIterator<StoredProcedureParameter> { |
import { type IResult, type Request } from 'mssql';
import type { StoredProcedureParameter, StoredProcedureSchema, ILogger } from '../types';
import { type DatabaseExecutor } from '../executor';
import { convertSqlValueToJsValue } from '../utils';
/**
* A manager for stored procedure metadata.
* Handles the retrieval and caching of stored procedure metadata.
*/
export class StoredProcedureMetadataManager {
/**
* Regular expression to extract MSSQL stored procedure names.
* See https://regex101.com/r/cMsTyT/1 for this regex.
*/
private static readonly storedProcedureNameRegex =
/((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))/i;
/**
* Matches any comments from the Stored Procedure definition.
* See https://regex101.com/r/dxA7n0/1 for this regex.
*/
private static readonly commentRegex = /(?:\s*-{2}.+\s*$)|(?:\/\*([\s\S]*?)\*\/)/gm;
/**
* Matches the parameters from the Stored Procedure definition.
* See https://regex101.com/r/4TaTky/1 for this regex.
*/
private static readonly parameterSectionRegex =
/(?<=(?:CREATE|ALTER)\s+PROCEDURE)\s+((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))(.*?)(?=(?:AS|FOR\s+REPLICATION)[^\w])/is;
/**
* See https://regex101.com/r/iMEaLb/1 for this regex.
* Match the individual parameters in the Parameter Definition.
*/
private static readonly parameterDefinitionRegex = /(@[\w]+)\s+([^\s]+)\s*=\s*([^, ]*),?/gi;
constructor(private readonly _databaseExecutor: DatabaseExecutor) {}
/**
* Parses the stored procedure parameter schema into a StoredProcedureParameter array.
* @param {string} storedProcedureName - The name of the stored procedure to retrieve the parameter schema for.
* @returns A Promise that resolves to the result of the stored procedure execution.
*/
public async getStoredProcedureParameterSchema(
storedProcedureName: string,
logger: ILogger,
): Promise<IResult<StoredProcedureSchema>> {
return await this._databaseExecutor.executeQueryRequest(async (request: Request) => {
// Remove square bracket notation if any, and split into schema and name.
const schemaAndName = storedProcedureName.replace(/\[|\]/g, '').split('.');
const result = await request.query<StoredProcedureSchema>(
'SELECT ' +
'PARAMETER_NAME as name, ' +
'DATA_TYPE as type, ' +
'PARAMETER_MODE as mode, ' +
'CHARACTER_MAXIMUM_LENGTH length, ' +
'NUMERIC_PRECISION as precision, ' +
'NUMERIC_SCALE as scale ' +
'FROM INFORMATION_SCHEMA.PARAMETERS ' +
`WHERE SPECIFIC_SCHEMA = '${schemaAndName[0]}' AND SPECIFIC_NAME = '${schemaAndName[1]}';
SELECT OBJECT_DEFINITION(OBJECT_ID('${storedProcedureName}')) AS storedProcedureDefinition;`,
);
const recordSetLength = result.recordsets.length as number;
if (recordSetLength < 1 || recordSetLength > 2) {
throw new Error(
`Could not retrieve stored procedure parameter schema from Database for stored procedure ${storedProcedureName}.`,
);
}
if (recordSetLength !== 2 || result.recordsets[1].length !== 1) {
throw new Error(
`Could not retrieve stored procedure definition from Database for stored procedure ${storedProcedureName}.`,
);
}
return result;
}, logger);
}
/**
* Parses the stored procedure parameter schema into a StoredProcedureParameter array.
* @param {string} storedProcedureName - The name of the stored procedure to parse the parameter schema for.
* @param {IResult<StoredProcedureSchema>} schemaResult - The result of the stored procedure parameter schema query.
* @returns A StoredProcedureParameter array.
*/
public parseStoredProcedureParameters(
storedProcedureName: string,
schemaResult: IResult<StoredProcedureSchema>,
): | IterableIterator<StoredProcedureParameter> { |
const parameterSchemaMap: Map<string, StoredProcedureParameter> =
schemaResult.recordsets[0].reduce(
(parameterMap: Map<string, StoredProcedureParameter>, item: StoredProcedureParameter) => {
parameterMap.set(item.name, item);
return parameterMap;
},
new Map<string, StoredProcedureParameter>(),
);
const storedProcedureDefinition = schemaResult.recordsets[1][0].storedProcedureDefinition;
if (storedProcedureDefinition == null) {
throw new Error(
`Could not parse stored procedure definition for stored procedure ${storedProcedureName}.`,
);
}
const commentStrippedStoredProcedureDefinition = storedProcedureDefinition.replace(
StoredProcedureMetadataManager.commentRegex,
'',
);
if (commentStrippedStoredProcedureDefinition === '') {
throw new Error(
`Could not parse stored procedure comments from definition for stored procedure ${storedProcedureName}.`,
);
}
const parameterSection = commentStrippedStoredProcedureDefinition.match(
StoredProcedureMetadataManager.parameterSectionRegex,
);
if (parameterSection === null || parameterSection.length !== 9) {
throw new Error(
`Could not parse stored procedure parameters from definition for stored procedure ${storedProcedureName}.`,
);
}
const parameterDefinition = parameterSection[8];
let parameterDefinitionMatch;
while (
(parameterDefinitionMatch =
StoredProcedureMetadataManager.parameterDefinitionRegex.exec(parameterDefinition)) !== null
) {
const name = parameterDefinitionMatch[1];
const type = parameterDefinitionMatch[2];
const defaultValue = parameterDefinitionMatch[3];
const parameter = parameterSchemaMap.get(name);
if (parameter !== undefined) {
parameter.defaultValue = convertSqlValueToJsValue(defaultValue, type);
}
}
return parameterSchemaMap.values();
}
}
| src/lib/stored-procedure/stored-procedure-metadata-manager.ts | Falven-mssql-data-source-bca6621 | [
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " }\n /**\n * Prepares the stored procedure request.\n * @param {IterableIterator<StoredProcedureParameter>} storedProcedureParameters - The stored procedure parameters.\n * @param {StoredProcedureInput} input - The input object.\n * @param {Request} request - The request object.\n * @returns A prepared request object.\n */\n private prepareStoredProcedureRequest(\n storedProcedureParameters: IterableIterator<StoredProcedureParameter>,",
"score": 0.9063209295272827
},
{
"filename": "src/lib/datasource/mssql-datasource.ts",
"retrieved_chunk": " * @template T - This type parameter represents the type of the value returned by the resolver procedure.\n * @param {string} storedProcedureName - The name of the stored procedure to execute.\n * @param {StoredProcedureInput} input - The input parameters for the stored procedure.\n * @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored\n * procedure results to the correct schema field names.\n * @returns A Promise that resolves to the result of the stored procedure execution.\n */\n public async executeStoredProcedureQuery<T>(\n storedProcedureName: string,\n input: InputParameters,",
"score": 0.9005828499794006
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " * procedure results to the correct schema field names.\n * @param {ILogger} logger - The logger to use for logging.\n * @returns A Promise that resolves to the result of the stored procedure execution.\n */\n public async executeStoredProcedure<T>(\n storedProcedureName: string,\n input: InputParameters,\n request: Request,\n logger: ILogger,\n info?: GraphQLResolveInfo,",
"score": 0.8940293192863464
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " const preparedResult = this.prepareStoredProcedureResult(result, info);\n logPerformance(logger, 'prepareStoredProcedureResult', startTime);\n return preparedResult;\n }\n private prepareParameters(\n storedProcedureParameters: IterableIterator<StoredProcedureParameter>,\n input: InputParameters,\n ): Map<string, PreparedStoredProcedureParameter> {\n // We want to use the inferred DB Stored Procedure schema as the source of truth.\n const preparedParameters = new Map<string, PreparedStoredProcedureParameter>();",
"score": 0.88971346616745
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " private readonly _storedProcedureMetadataManager: StoredProcedureMetadataManager,\n ) {}\n /**\n * Executes a stored procedure with the provided input parameters, and returns the result.\n * @template TVal - The type of records in the result set.\n * @template TRet - The type of the result object to be returned.\n * @param {string} storedProcedureName - The name of the stored procedure to execute.\n * @param {StoredProcedureInput} input - The input parameters for the stored procedure.\n * @param {Request} request - The request to execute the stored procedure.\n * @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored",
"score": 0.8833606243133545
}
] | typescript | IterableIterator<StoredProcedureParameter> { |
import { camelCase } from 'lodash';
import { type Request, type IProcedureResult, type IResult, type IRecordSet } from 'mssql';
import { type GraphQLResolveInfo } from 'graphql';
import {
type DriverType,
type PreparedStoredProcedureParameter,
ParameterMode,
type StoredProcedureSchema,
type StoredProcedureParameter,
type ILogger,
type InputParameters,
} from '../types';
import { mapDbTypeToDriverType, replacer } from '../utils';
import { logExecutionBegin, logPerformance, logSafely } from '../logging';
import {
type StoredProcedureCacheManager,
type StoredProcedureMetadataManager,
} from '../stored-procedure';
import { type IResolverProcedureResult } from '../types/i-resolver-procedure-result';
import { getNodeSelectionSetNames, getFieldNamesExcludingNode } from '../utils/graphql-helper';
/**
* StoredProcedureManager provides methods to interact
* with a Microsoft SQL Server database for managing stored procedures.
*/
export class StoredProcedureManager {
/**
* Creates a new instance of StoredProcedureManager.
*/
constructor(
private readonly _storedProcedureCacheManager: StoredProcedureCacheManager,
private readonly _storedProcedureMetadataManager: StoredProcedureMetadataManager,
) {}
/**
* Executes a stored procedure with the provided input parameters, and returns the result.
* @template TVal - The type of records in the result set.
* @template TRet - The type of the result object to be returned.
* @param {string} storedProcedureName - The name of the stored procedure to execute.
* @param {StoredProcedureInput} input - The input parameters for the stored procedure.
* @param {Request} request - The request to execute the stored procedure.
* @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored
* procedure results to the correct schema field names.
* @param {ILogger} logger - The logger to use for logging.
* @returns A Promise that resolves to the result of the stored procedure execution.
*/
public async executeStoredProcedure<T>(
storedProcedureName: string,
input: InputParameters,
request: Request,
logger: ILogger,
info?: GraphQLResolveInfo,
): Promise<IResolverProcedureResult<T>> {
let startTime = performance.now();
let schema = (await this._storedProcedureCacheManager.tryGetFromCache(storedProcedureName)) as
| IResult<StoredProcedureSchema>
| undefined;
if (schema === undefined) {
logSafely(
logger,
'info',
// Yellow
`\x1b[33mCache miss occurred while retrieving the cached schema for ${storedProcedureName}\x1b[0m`,
);
schema = await this._storedProcedureMetadataManager.getStoredProcedureParameterSchema(
storedProcedureName,
logger,
);
await this._storedProcedureCacheManager.addToCache(storedProcedureName, schema);
} else {
logSafely(
logger,
'info',
// Green
`\x1b[32mCache hit occurred while retrieving the cached schema for ${storedProcedureName}\x1b[0m`,
);
}
logPerformance(logger, 'getStoredProcedureParameterSchema', startTime);
startTime = performance.now();
const storedProcedureParameters =
this._storedProcedureMetadataManager.parseStoredProcedureParameters(
storedProcedureName,
schema,
);
logPerformance(logger, 'parseStoredProcedureParameters', startTime);
startTime = performance.now();
const preparedRequest = this.prepareStoredProcedureRequest(
storedProcedureParameters,
input,
request,
);
logPerformance(logger, 'prepareStoredProcedureRequest', startTime);
startTime = performance.now();
logExecutionBegin(
logger,
`Stored Procedure ${storedProcedureName} with parameters`,
preparedRequest.parameters,
// Green
'32m',
);
const result = await preparedRequest.execute(storedProcedureName);
startTime = performance.now();
const preparedResult = this.prepareStoredProcedureResult(result, info);
logPerformance(logger, 'prepareStoredProcedureResult', startTime);
return preparedResult;
}
private prepareParameters(
storedProcedureParameters: IterableIterator<StoredProcedureParameter>,
input: InputParameters,
): Map<string, PreparedStoredProcedureParameter> {
// We want to use the inferred DB Stored Procedure schema as the source of truth.
const preparedParameters = new Map<string, PreparedStoredProcedureParameter>();
for (const spParameter of storedProcedureParameters) {
const { name, type, length, precision, scale, ...rest } = spParameter;
const parameterName = name.slice(1);
// Let's use the parameter name in lowercase as the lookup key.
preparedParameters.set(parameterName.toLowerCase(), {
name: parameterName,
type | : mapDbTypeToDriverType({ |
type,
length,
precision,
scale,
}) as DriverType,
value: undefined,
...rest,
});
}
// Populate our input values into the request parameters.
const inputParameters = input as Record<string, unknown>;
for (const inputParameterKey in inputParameters) {
const preparedParameter = preparedParameters.get(inputParameterKey.toLowerCase());
if (preparedParameter != null) {
preparedParameter.value = inputParameters[inputParameterKey];
}
// We don't care about provided input parameters that are missing in the Stored Procedure definition.
}
return preparedParameters;
}
private getMissingRequiredParameters(
parameters: Map<string, PreparedStoredProcedureParameter>,
): PreparedStoredProcedureParameter[] {
// Check what required parameters are missing.
const missingRequiredParameters = [];
for (const parameter of parameters.values()) {
// If they have a default value they can be ommitted from the request.
if (parameter.defaultValue === undefined && parameter.value === undefined) {
missingRequiredParameters.push(parameter);
}
}
return missingRequiredParameters;
}
private addParametersToRequest(
parameters: Map<string, PreparedStoredProcedureParameter>,
request: Request,
): Request {
const preparedRequest = request;
for (const parameter of parameters.values()) {
const { name, type, mode, value, defaultValue } = parameter;
if (defaultValue !== undefined && value === undefined) {
continue;
}
const modeEnum = mode;
if (modeEnum === ParameterMode.IN) {
preparedRequest.input(name, type, value);
} else if (modeEnum === ParameterMode.INOUT) {
preparedRequest.output(name, type, value);
} else {
throw new Error(`Unknown parameter mode: ${mode}`);
}
}
return preparedRequest;
}
/**
* Prepares the stored procedure request.
* @param {IterableIterator<StoredProcedureParameter>} storedProcedureParameters - The stored procedure parameters.
* @param {StoredProcedureInput} input - The input object.
* @param {Request} request - The request object.
* @returns A prepared request object.
*/
private prepareStoredProcedureRequest(
storedProcedureParameters: IterableIterator<StoredProcedureParameter>,
input: InputParameters,
request: Request,
): Request {
const parameters = this.prepareParameters(storedProcedureParameters, input);
const missingRequiredParameters = this.getMissingRequiredParameters(parameters);
const missingLength = missingRequiredParameters.length;
if (missingLength > 0) {
throw new Error(
`Missing ${missingLength} required parameters: ${missingRequiredParameters
.map((param) => JSON.stringify(param, replacer, 0))
.join(', ')}.`,
);
}
const preparedRequest = this.addParametersToRequest(parameters, request);
return preparedRequest;
}
/**
* Maps the keys of an object based on the provided mapping.
* @template T - The type of the original object.
* @param {T} obj - The object whose keys need to be mapped.
* @param {Record<string, string>} mapping - A dictionary containing the mapping of the original keys to the new keys.
* @returns {T} A new object with the keys mapped according to the provided mapping.
*/
private mapKeysWithMapping<T extends Record<string, unknown>>(
obj: T,
mapping: Record<string, string>,
): T {
const result: Record<string, unknown> = {};
for (const key in obj) {
const mappedKey = mapping[key.toLowerCase()] ?? camelCase(key);
result[mappedKey] = obj[key];
}
return result as T;
}
/**
* Prepares the stored procedure result into a GraphQL result object.
* @param {IProcedureResult} result - The stored procedure result.
* @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored
* procedure results to the correct schema field names.
* @returns {IResolverProcedureResult} A prepared GraphQL result object.
*/
private prepareStoredProcedureResult<T extends Record<string, unknown>>(
result: IProcedureResult<T>,
info?: GraphQLResolveInfo,
): IResolverProcedureResult<T> {
const { resultSetFields, outputFields } =
info !== undefined
? {
resultSetFields: getNodeSelectionSetNames(info, 'resultSets'),
outputFields: getFieldNamesExcludingNode(info, 'resultSets'),
}
: { resultSetFields: {}, outputFields: {} };
const resultSets = result.recordsets.map((recordset: IRecordSet<Record<string, unknown>>) => {
return recordset.map((record: Record<string, unknown>) =>
this.mapKeysWithMapping(record, resultSetFields),
);
});
const output = this.mapKeysWithMapping(result.output, outputFields);
const preparedResult = {
returnValue: result.returnValue,
resultSets: resultSets as T[][],
rowsAffected: result.rowsAffected,
...output,
};
return preparedResult;
}
}
| src/lib/stored-procedure/stored-procedure-manager.ts | Falven-mssql-data-source-bca6621 | [
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " ): Promise<IResult<StoredProcedureSchema>> {\n return await this._databaseExecutor.executeQueryRequest(async (request: Request) => {\n // Remove square bracket notation if any, and split into schema and name.\n const schemaAndName = storedProcedureName.replace(/\\[|\\]/g, '').split('.');\n const result = await request.query<StoredProcedureSchema>(\n 'SELECT ' +\n 'PARAMETER_NAME as name, ' +\n 'DATA_TYPE as type, ' +\n 'PARAMETER_MODE as mode, ' +\n 'CHARACTER_MAXIMUM_LENGTH length, ' +",
"score": 0.8629451394081116
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " `Could not parse stored procedure comments from definition for stored procedure ${storedProcedureName}.`,\n );\n }\n const parameterSection = commentStrippedStoredProcedureDefinition.match(\n StoredProcedureMetadataManager.parameterSectionRegex,\n );\n if (parameterSection === null || parameterSection.length !== 9) {\n throw new Error(\n `Could not parse stored procedure parameters from definition for stored procedure ${storedProcedureName}.`,\n );",
"score": 0.8487143516540527
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " /**\n * Parses the stored procedure parameter schema into a StoredProcedureParameter array.\n * @param {string} storedProcedureName - The name of the stored procedure to parse the parameter schema for.\n * @param {IResult<StoredProcedureSchema>} schemaResult - The result of the stored procedure parameter schema query.\n * @returns A StoredProcedureParameter array.\n */\n public parseStoredProcedureParameters(\n storedProcedureName: string,\n schemaResult: IResult<StoredProcedureSchema>,\n ): IterableIterator<StoredProcedureParameter> {",
"score": 0.8441939353942871
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " private static readonly parameterDefinitionRegex = /(@[\\w]+)\\s+([^\\s]+)\\s*=\\s*([^, ]*),?/gi;\n constructor(private readonly _databaseExecutor: DatabaseExecutor) {}\n /**\n * Parses the stored procedure parameter schema into a StoredProcedureParameter array.\n * @param {string} storedProcedureName - The name of the stored procedure to retrieve the parameter schema for.\n * @returns A Promise that resolves to the result of the stored procedure execution.\n */\n public async getStoredProcedureParameterSchema(\n storedProcedureName: string,\n logger: ILogger,",
"score": 0.8420987129211426
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " }\n const parameterDefinition = parameterSection[8];\n let parameterDefinitionMatch;\n while (\n (parameterDefinitionMatch =\n StoredProcedureMetadataManager.parameterDefinitionRegex.exec(parameterDefinition)) !== null\n ) {\n const name = parameterDefinitionMatch[1];\n const type = parameterDefinitionMatch[2];\n const defaultValue = parameterDefinitionMatch[3];",
"score": 0.8397889137268066
}
] | typescript | : mapDbTypeToDriverType({ |
import sourceToCST, {
ConcreteAttributeNode,
ConcreteElementOpeningTagNode,
ConcreteElementSelfClosingTagNode,
ConcreteLiquidDropNode,
ConcreteNode,
ConcreteNodeTypes,
ConcreteTextNode,
} from '../1-source-to-cst';
import { UnknownConcreteNodeTypeError } from '../errors';
import ASTBuilder from './ast-builder';
export type BasicNode<T> = {
type: T;
locStart: number;
locEnd: number;
source: string;
};
export enum NodeTypes {
TextNode = 'TextNode',
LiquidDropNode = 'LiquidDropNode',
ElementNode = 'ElementNode',
AttributeDoubleQuoted = 'AttributeDoubleQuoted',
AttributeSingleQuoted = 'AttributeSingleQuoted',
AttributeUnquoted = 'AttributeUnquoted',
AttributeEmpty = 'AttributeEmpty',
}
export type TextNode = {
value: string;
} & BasicNode<NodeTypes.TextNode>;
export type LiquidDropNode = {
value: string;
} & BasicNode<NodeTypes.LiquidDropNode>;
export type LiquidXNode = TextNode | LiquidDropNode | ElementNode | AttributeNode;
export type ElementNode = {
name: string;
source: string;
attributes: AttributeNode[];
children: LiquidXNode[];
} & BasicNode<NodeTypes.ElementNode>;
export type AttributeNode =
| AttributeDoubleQuoted
| AttributeSingleQuoted
| AttributeUnquoted
| AttributeEmpty;
export type AttributeNodeBase<T> = {
name: TextNode;
value: TextNode | LiquidDropNode;
} & BasicNode<T>;
export type AttributeDoubleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeDoubleQuoted>;
export type AttributeSingleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeSingleQuoted>;
export type AttributeUnquoted = {} & AttributeNodeBase<NodeTypes.AttributeUnquoted>;
export type AttributeEmpty = { name: TextNode } & BasicNode<NodeTypes.AttributeEmpty>;
function toTextNode(node: ConcreteTextNode): TextNode {
return {
type: NodeTypes.TextNode,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
value: node.value,
};
}
function toLiquidDropNode(node: ConcreteLiquidDropNode): LiquidDropNode {
return {
type: NodeTypes.LiquidDropNode,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
value: node.value,
};
}
function toElementNode(
node: ConcreteElementOpeningTagNode | ConcreteElementSelfClosingTagNode,
): ElementNode {
return {
type: NodeTypes.ElementNode,
locStart: node.locStart,
locEnd: node.locEnd,
name: node.name,
source: node.source,
attributes: toAttributes(node.attributes),
children: [],
};
}
function toAttributes(attributes: ConcreteAttributeNode[]) {
return cstToAST(attributes) as AttributeNode[];
}
function toAttributeValue(value: ConcreteTextNode | ConcreteLiquidDropNode) {
return cstToAST([value])[0] as TextNode | LiquidDropNode;
}
function isAttributeNode(node: any): boolean {
return (
node.type === ConcreteNodeTypes.AttributeDoubleQuoted ||
node.type === ConcreteNodeTypes.AttributeSingleQuoted ||
node.type === ConcreteNodeTypes.AttributeUnquoted ||
node.type === ConcreteNodeTypes.AttributeEmpty
);
}
function cstToAST(cst: ConcreteNode[] | ConcreteAttributeNode[]) {
if (cst.length === 0) return [];
const astBuilder = new ASTBuilder(cst[0].source);
for (let i = 0; i < cst.length; i += 1) {
const node = cst[i];
const prevNode = cst[i - 1];
// Add whitespaces and linebreaks that went missing after parsing. We don't need to do this
// if the node is an attribute since whitespaces between attributes is not important to preserve.
// In fact it would probably break the rendered output due to unexpected text nodes.
// TODO: This should be handled in the grammar/source-to-cst part instead (if possible).
if (prevNode?.source && !isAttributeNode(node)) {
const diff = node.locStart - prevNode.locEnd;
if (diff > 0) {
astBuilder.push(
toTextNode({
type: ConcreteNodeTypes.TextNode,
locStart: prevNode.locEnd,
locEnd: node.locStart,
source: node.source,
value: prevNode.source.slice(prevNode.locEnd, node.locStart),
}),
);
}
}
switch (node.type) {
case ConcreteNodeTypes.TextNode: {
astBuilder.push(toTextNode(node));
break;
}
case ConcreteNodeTypes.LiquidDropNode: {
astBuilder.push(toLiquidDropNode(node));
break;
}
case ConcreteNodeTypes.ElementOpeningTag: {
astBuilder.open(toElementNode(node));
break;
}
case ConcreteNodeTypes.ElementClosingTag: {
| astBuilder.close(node, NodeTypes.ElementNode); |
break;
}
case ConcreteNodeTypes.ElementSelfClosingTag: {
astBuilder.open(toElementNode(node));
astBuilder.close(node, NodeTypes.ElementNode);
break;
}
case ConcreteNodeTypes.AttributeDoubleQuoted:
case ConcreteNodeTypes.AttributeSingleQuoted:
case ConcreteNodeTypes.AttributeUnquoted: {
const attributeNode: AttributeDoubleQuoted | AttributeSingleQuoted | AttributeUnquoted = {
type: node.type as unknown as
| NodeTypes.AttributeDoubleQuoted
| NodeTypes.AttributeSingleQuoted
| NodeTypes.AttributeUnquoted,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
name: cstToAST([node.name])[0] as TextNode,
value: toAttributeValue(node.value),
};
astBuilder.push(attributeNode);
break;
}
case ConcreteNodeTypes.AttributeEmpty: {
const attributeNode: AttributeEmpty = {
type: NodeTypes.AttributeEmpty,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
name: cstToAST([node.name])[0] as TextNode,
};
astBuilder.push(attributeNode);
break;
}
default: {
throw new UnknownConcreteNodeTypeError(
'',
(node as any)?.source,
(node as any)?.locStart,
(node as any)?.locEnd,
);
}
}
}
return astBuilder.finish();
}
export default function sourceToAST(source: string): LiquidXNode[] {
const cst = sourceToCST(source);
const ast = cstToAST(cst);
return ast;
}
| src/parser/2-cst-to-ast/index.ts | unshopable-liquidx-a101873 | [
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " case NodeTypes.ElementNode: {\n output += renderElement(node, { withSource, isChildOfElementNode });\n break;\n }\n case NodeTypes.AttributeDoubleQuoted:\n case NodeTypes.AttributeSingleQuoted:\n case NodeTypes.AttributeUnquoted: {\n const name = renderText(node.name);\n let value = null;\n if (node.value.type === NodeTypes.TextNode) {",
"score": 0.8165275454521179
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": "export type ConcreteLiquidDropNode = {\n value: string;\n} & ConcreteBasicNode<ConcreteNodeTypes.LiquidDropNode>;\nexport type ConcreteElementOpeningTagNode = {\n name: string;\n attributes: ConcreteAttributeNode[];\n} & ConcreteBasicNode<ConcreteNodeTypes.ElementOpeningTag>;\nexport type ConcreteElementClosingTagNode = {\n name: string;\n} & ConcreteBasicNode<ConcreteNodeTypes.ElementClosingTag>;",
"score": 0.8083203434944153
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": " locEnd,\n source,\n value: 2,\n },\n liquidDropValue: (node: Node) => node.sourceString.trimEnd(),\n ElementNode: 0,\n ElementOpeningTag: {\n type: ConcreteNodeTypes.ElementOpeningTag,\n locStart,\n locEnd,",
"score": 0.7865442037582397
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": " return this.sourceString;\n },\n source,\n };\n const mapping: Mapping = {\n Node: 0,\n TextNode: textNode,\n liquidDropNode: {\n type: ConcreteNodeTypes.LiquidDropNode,\n locStart,",
"score": 0.7794702053070068
},
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " value = JSON.stringify(renderText(node.value));\n } else {\n value = renderLiquidDrop(node.value);\n }\n output += `${name}: ${value}`;\n break;\n }\n case NodeTypes.AttributeEmpty: {\n const name = renderText(node.name);\n const value = true;",
"score": 0.756109893321991
}
] | typescript | astBuilder.close(node, NodeTypes.ElementNode); |
import { Node } from 'ohm-js';
import { toAST } from 'ohm-js/extras';
import { CSTParsingError } from '../errors';
import grammar from '../grammar';
export enum ConcreteNodeTypes {
TextNode = 'TextNode',
LiquidDropNode = 'LiquidDropNode',
ElementOpeningTag = 'ElementOpeningTag',
ElementClosingTag = 'ElementClosingTag',
ElementSelfClosingTag = 'ElementSelfClosingTag',
AttributeDoubleQuoted = 'AttributeDoubleQuoted',
AttributeSingleQuoted = 'AttributeSingleQuoted',
AttributeUnquoted = 'AttributeUnquoted',
AttributeEmpty = 'AttributeEmpty',
}
export type ConcreteNode =
| ConcreteTextNode
| ConcreteLiquidDropNode
| ConcreteElementOpeningTagNode
| ConcreteElementClosingTagNode
| ConcreteElementSelfClosingTagNode;
export type ConcreteBasicNode<T> = {
type: T;
locStart: number;
locEnd: number;
source: string;
};
export type ConcreteTextNode = {
value: string;
} & ConcreteBasicNode<ConcreteNodeTypes.TextNode>;
export type ConcreteLiquidDropNode = {
value: string;
} & ConcreteBasicNode<ConcreteNodeTypes.LiquidDropNode>;
export type ConcreteElementOpeningTagNode = {
name: string;
attributes: ConcreteAttributeNode[];
} & ConcreteBasicNode<ConcreteNodeTypes.ElementOpeningTag>;
export type ConcreteElementClosingTagNode = {
name: string;
} & ConcreteBasicNode<ConcreteNodeTypes.ElementClosingTag>;
export type ConcreteElementSelfClosingTagNode = {
name: string;
attributes: ConcreteAttributeNode[];
} & ConcreteBasicNode<ConcreteNodeTypes.ElementSelfClosingTag>;
export type ConcreteAttributeNodeBase<T> = {
name: ConcreteTextNode;
value: ConcreteTextNode;
} & ConcreteBasicNode<T>;
export type ConcreteAttributeNode =
| ConcreteAttributeDoubleQuoted
| ConcreteAttributeSingleQuoted
| ConcreteAttributeUnquoted
| ConcreteAttributeEmpty;
export type ConcreteAttributeDoubleQuoted =
{} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeDoubleQuoted>;
export type ConcreteAttributeSingleQuoted =
{} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeSingleQuoted>;
export type ConcreteAttributeUnquoted =
{} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeUnquoted>;
export type ConcreteAttributeEmpty = {
name: ConcreteTextNode;
} & ConcreteBasicNode<ConcreteNodeTypes.AttributeEmpty>;
export type CST = ConcreteNode[];
export type TemplateMapping = {
type: ConcreteNodeTypes;
locStart: (node: Node[]) => number;
locEnd: (node: Node[]) => number;
source: string;
[k: string]: string | number | boolean | object | null;
};
export type TopLevelFunctionMapping = (...nodes: Node[]) => any;
export type Mapping = {
[k: string]: number | TemplateMapping | TopLevelFunctionMapping;
};
function locStart(nodes: Node[]) {
return nodes[0].source.startIdx;
}
function locEnd(nodes: Node[]) {
return nodes[nodes.length - 1].source.endIdx;
}
export default function sourceToCST(source: string): ConcreteNode[] {
const matchResult = grammar.match(source);
if (matchResult.failed()) {
| throw new CSTParsingError(matchResult); |
}
const textNode = {
type: ConcreteNodeTypes.TextNode,
locStart,
locEnd,
value: function (this: Node) {
return this.sourceString;
},
source,
};
const mapping: Mapping = {
Node: 0,
TextNode: textNode,
liquidDropNode: {
type: ConcreteNodeTypes.LiquidDropNode,
locStart,
locEnd,
source,
value: 2,
},
liquidDropValue: (node: Node) => node.sourceString.trimEnd(),
ElementNode: 0,
ElementOpeningTag: {
type: ConcreteNodeTypes.ElementOpeningTag,
locStart,
locEnd,
name: 1,
attributes: 2,
source,
},
ElementClosingTag: {
type: ConcreteNodeTypes.ElementClosingTag,
locStart,
locEnd,
name: 1,
source,
},
ElementSelfClosingTag: {
type: ConcreteNodeTypes.ElementSelfClosingTag,
locStart,
locEnd,
name: 1,
attributes: 2,
source,
},
AttributeDoubleQuoted: {
type: ConcreteNodeTypes.AttributeDoubleQuoted,
locStart,
locEnd,
source,
name: 0,
value: 3,
},
AttributeSingleQuoted: {
type: ConcreteNodeTypes.AttributeSingleQuoted,
locStart,
locEnd,
source,
name: 0,
value: 3,
},
AttributeUnquoted: {
type: ConcreteNodeTypes.AttributeUnquoted,
locStart,
locEnd,
source,
name: 0,
value: 2,
},
AttributeEmpty: {
type: ConcreteNodeTypes.AttributeEmpty,
locStart,
locEnd,
source,
name: 0,
},
attributeName: textNode,
attributeDoubleQuotedValue: 0,
attributeSingleQuotedValue: 0,
attributeUnquotedValue: 0,
attributeDoubleQuotedTextNode: textNode,
attributeSingleQuotedTextNode: textNode,
attributeUnquotedTextNode: textNode,
};
const cst = toAST(matchResult, mapping) as ConcreteNode[];
return cst;
}
| src/parser/1-source-to-cst/index.ts | unshopable-liquidx-a101873 | [
{
"filename": "src/parser/2-cst-to-ast/ast-builder.ts",
"retrieved_chunk": " source: string;\n constructor(source: string) {\n this.ast = [];\n this.cursor = [];\n this.source = source;\n }\n get current(): LiquidXNode[] {\n return deepGet<LiquidXNode[]>(this.cursor, this.ast);\n }\n get currentPosition(): number {",
"score": 0.7791578769683838
},
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": "}\nfunction renderEndMarker(node: ElementNode) {\n return `{% # LIQUIDX:END - SOURCE ${JSON.stringify(\n node.source.slice(node.locStart, node.locEnd),\n )} %}`;\n}\nfunction renderElement(\n node: ElementNode,\n { withSource = false, isChildOfElementNode = false } = {},\n) {",
"score": 0.7754380702972412
},
{
"filename": "src/parser/2-cst-to-ast/index.ts",
"retrieved_chunk": " }\n return astBuilder.finish();\n}\nexport default function sourceToAST(source: string): LiquidXNode[] {\n const cst = sourceToCST(source);\n const ast = cstToAST(cst);\n return ast;\n}",
"score": 0.7708130478858948
},
{
"filename": "src/parser/2-cst-to-ast/ast-builder.ts",
"retrieved_chunk": " return (this.current || []).length - 1;\n }\n get parent(): ElementNode | undefined {\n if (this.cursor.length == 0) return undefined;\n return deepGet<ElementNode>(dropLast(1, this.cursor), this.ast);\n }\n open(node: ElementNode) {\n this.push(node);\n this.cursor.push(this.currentPosition);\n this.cursor.push('children');",
"score": 0.7708128690719604
},
{
"filename": "src/parser/2-cst-to-ast/__tests__/utils.ts",
"retrieved_chunk": "import sourceToAST from '../';\nexport function expectOutput(input: string) {\n const output = sourceToAST(input);\n return expect(output);\n}\nexport function expectErrorMessage(input: string) {\n let errorMessage = '';\n try {\n sourceToAST(input);\n } catch (error: any) {",
"score": 0.7662322521209717
}
] | typescript | throw new CSTParsingError(matchResult); |
import { Node } from 'ohm-js';
import { toAST } from 'ohm-js/extras';
import { CSTParsingError } from '../errors';
import grammar from '../grammar';
export enum ConcreteNodeTypes {
TextNode = 'TextNode',
LiquidDropNode = 'LiquidDropNode',
ElementOpeningTag = 'ElementOpeningTag',
ElementClosingTag = 'ElementClosingTag',
ElementSelfClosingTag = 'ElementSelfClosingTag',
AttributeDoubleQuoted = 'AttributeDoubleQuoted',
AttributeSingleQuoted = 'AttributeSingleQuoted',
AttributeUnquoted = 'AttributeUnquoted',
AttributeEmpty = 'AttributeEmpty',
}
export type ConcreteNode =
| ConcreteTextNode
| ConcreteLiquidDropNode
| ConcreteElementOpeningTagNode
| ConcreteElementClosingTagNode
| ConcreteElementSelfClosingTagNode;
export type ConcreteBasicNode<T> = {
type: T;
locStart: number;
locEnd: number;
source: string;
};
export type ConcreteTextNode = {
value: string;
} & ConcreteBasicNode<ConcreteNodeTypes.TextNode>;
export type ConcreteLiquidDropNode = {
value: string;
} & ConcreteBasicNode<ConcreteNodeTypes.LiquidDropNode>;
export type ConcreteElementOpeningTagNode = {
name: string;
attributes: ConcreteAttributeNode[];
} & ConcreteBasicNode<ConcreteNodeTypes.ElementOpeningTag>;
export type ConcreteElementClosingTagNode = {
name: string;
} & ConcreteBasicNode<ConcreteNodeTypes.ElementClosingTag>;
export type ConcreteElementSelfClosingTagNode = {
name: string;
attributes: ConcreteAttributeNode[];
} & ConcreteBasicNode<ConcreteNodeTypes.ElementSelfClosingTag>;
export type ConcreteAttributeNodeBase<T> = {
name: ConcreteTextNode;
value: ConcreteTextNode;
} & ConcreteBasicNode<T>;
export type ConcreteAttributeNode =
| ConcreteAttributeDoubleQuoted
| ConcreteAttributeSingleQuoted
| ConcreteAttributeUnquoted
| ConcreteAttributeEmpty;
export type ConcreteAttributeDoubleQuoted =
{} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeDoubleQuoted>;
export type ConcreteAttributeSingleQuoted =
{} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeSingleQuoted>;
export type ConcreteAttributeUnquoted =
{} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeUnquoted>;
export type ConcreteAttributeEmpty = {
name: ConcreteTextNode;
} & ConcreteBasicNode<ConcreteNodeTypes.AttributeEmpty>;
export type CST = ConcreteNode[];
export type TemplateMapping = {
type: ConcreteNodeTypes;
locStart: (node: Node[]) => number;
locEnd: (node: Node[]) => number;
source: string;
[k: string]: string | number | boolean | object | null;
};
export type TopLevelFunctionMapping = (...nodes: Node[]) => any;
export type Mapping = {
[k: string]: number | TemplateMapping | TopLevelFunctionMapping;
};
function locStart(nodes: Node[]) {
return nodes[0].source.startIdx;
}
function locEnd(nodes: Node[]) {
return nodes[nodes.length - 1].source.endIdx;
}
export default function sourceToCST(source: string): ConcreteNode[] {
const | matchResult = grammar.match(source); |
if (matchResult.failed()) {
throw new CSTParsingError(matchResult);
}
const textNode = {
type: ConcreteNodeTypes.TextNode,
locStart,
locEnd,
value: function (this: Node) {
return this.sourceString;
},
source,
};
const mapping: Mapping = {
Node: 0,
TextNode: textNode,
liquidDropNode: {
type: ConcreteNodeTypes.LiquidDropNode,
locStart,
locEnd,
source,
value: 2,
},
liquidDropValue: (node: Node) => node.sourceString.trimEnd(),
ElementNode: 0,
ElementOpeningTag: {
type: ConcreteNodeTypes.ElementOpeningTag,
locStart,
locEnd,
name: 1,
attributes: 2,
source,
},
ElementClosingTag: {
type: ConcreteNodeTypes.ElementClosingTag,
locStart,
locEnd,
name: 1,
source,
},
ElementSelfClosingTag: {
type: ConcreteNodeTypes.ElementSelfClosingTag,
locStart,
locEnd,
name: 1,
attributes: 2,
source,
},
AttributeDoubleQuoted: {
type: ConcreteNodeTypes.AttributeDoubleQuoted,
locStart,
locEnd,
source,
name: 0,
value: 3,
},
AttributeSingleQuoted: {
type: ConcreteNodeTypes.AttributeSingleQuoted,
locStart,
locEnd,
source,
name: 0,
value: 3,
},
AttributeUnquoted: {
type: ConcreteNodeTypes.AttributeUnquoted,
locStart,
locEnd,
source,
name: 0,
value: 2,
},
AttributeEmpty: {
type: ConcreteNodeTypes.AttributeEmpty,
locStart,
locEnd,
source,
name: 0,
},
attributeName: textNode,
attributeDoubleQuotedValue: 0,
attributeSingleQuotedValue: 0,
attributeUnquotedValue: 0,
attributeDoubleQuotedTextNode: textNode,
attributeSingleQuotedTextNode: textNode,
attributeUnquotedTextNode: textNode,
};
const cst = toAST(matchResult, mapping) as ConcreteNode[];
return cst;
}
| src/parser/1-source-to-cst/index.ts | unshopable-liquidx-a101873 | [
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": "}\nfunction renderEndMarker(node: ElementNode) {\n return `{% # LIQUIDX:END - SOURCE ${JSON.stringify(\n node.source.slice(node.locStart, node.locEnd),\n )} %}`;\n}\nfunction renderElement(\n node: ElementNode,\n { withSource = false, isChildOfElementNode = false } = {},\n) {",
"score": 0.7903676629066467
},
{
"filename": "src/parser/2-cst-to-ast/ast-builder.ts",
"retrieved_chunk": " source: string;\n constructor(source: string) {\n this.ast = [];\n this.cursor = [];\n this.source = source;\n }\n get current(): LiquidXNode[] {\n return deepGet<LiquidXNode[]>(this.cursor, this.ast);\n }\n get currentPosition(): number {",
"score": 0.788094162940979
},
{
"filename": "src/parser/2-cst-to-ast/index.ts",
"retrieved_chunk": " astBuilder.push(\n toTextNode({\n type: ConcreteNodeTypes.TextNode,\n locStart: prevNode.locEnd,\n locEnd: node.locStart,\n source: node.source,\n value: prevNode.source.slice(prevNode.locEnd, node.locStart),\n }),\n );\n }",
"score": 0.7584561109542847
},
{
"filename": "src/parser/errors.ts",
"retrieved_chunk": " this.name = 'UnknownConcreteNodeTypeError';\n }\n}\nexport class ASTParsingError extends LoggableError {\n constructor(message: string, source: string, locStart: number, locEnd: number) {\n super({ result: undefined, message, source, locStart, locEnd });\n this.name = 'ASTParsingError';\n }\n}",
"score": 0.7544372081756592
},
{
"filename": "src/parser/2-cst-to-ast/index.ts",
"retrieved_chunk": " }\n return astBuilder.finish();\n}\nexport default function sourceToAST(source: string): LiquidXNode[] {\n const cst = sourceToCST(source);\n const ast = cstToAST(cst);\n return ast;\n}",
"score": 0.7534031867980957
}
] | typescript | matchResult = grammar.match(source); |
import sourceToCST, {
ConcreteAttributeNode,
ConcreteElementOpeningTagNode,
ConcreteElementSelfClosingTagNode,
ConcreteLiquidDropNode,
ConcreteNode,
ConcreteNodeTypes,
ConcreteTextNode,
} from '../1-source-to-cst';
import { UnknownConcreteNodeTypeError } from '../errors';
import ASTBuilder from './ast-builder';
export type BasicNode<T> = {
type: T;
locStart: number;
locEnd: number;
source: string;
};
export enum NodeTypes {
TextNode = 'TextNode',
LiquidDropNode = 'LiquidDropNode',
ElementNode = 'ElementNode',
AttributeDoubleQuoted = 'AttributeDoubleQuoted',
AttributeSingleQuoted = 'AttributeSingleQuoted',
AttributeUnquoted = 'AttributeUnquoted',
AttributeEmpty = 'AttributeEmpty',
}
export type TextNode = {
value: string;
} & BasicNode<NodeTypes.TextNode>;
export type LiquidDropNode = {
value: string;
} & BasicNode<NodeTypes.LiquidDropNode>;
export type LiquidXNode = TextNode | LiquidDropNode | ElementNode | AttributeNode;
export type ElementNode = {
name: string;
source: string;
attributes: AttributeNode[];
children: LiquidXNode[];
} & BasicNode<NodeTypes.ElementNode>;
export type AttributeNode =
| AttributeDoubleQuoted
| AttributeSingleQuoted
| AttributeUnquoted
| AttributeEmpty;
export type AttributeNodeBase<T> = {
name: TextNode;
value: TextNode | LiquidDropNode;
} & BasicNode<T>;
export type AttributeDoubleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeDoubleQuoted>;
export type AttributeSingleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeSingleQuoted>;
export type AttributeUnquoted = {} & AttributeNodeBase<NodeTypes.AttributeUnquoted>;
export type AttributeEmpty = { name: TextNode } & BasicNode<NodeTypes.AttributeEmpty>;
function toTextNode(node: ConcreteTextNode): TextNode {
return {
type: NodeTypes.TextNode,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
value: node.value,
};
}
function toLiquidDropNode(node: ConcreteLiquidDropNode): LiquidDropNode {
return {
type: NodeTypes.LiquidDropNode,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
value: node.value,
};
}
function toElementNode(
node: ConcreteElementOpeningTagNode | ConcreteElementSelfClosingTagNode,
): ElementNode {
return {
type: NodeTypes.ElementNode,
locStart: node.locStart,
locEnd: node.locEnd,
name: node.name,
source: node.source,
attributes: toAttributes(node.attributes),
children: [],
};
}
function toAttributes(attributes: ConcreteAttributeNode[]) {
return cstToAST(attributes) as AttributeNode[];
}
function toAttributeValue(value: ConcreteTextNode | ConcreteLiquidDropNode) {
return cstToAST([value])[0] as TextNode | LiquidDropNode;
}
function isAttributeNode(node: any): boolean {
return (
node.type === ConcreteNodeTypes.AttributeDoubleQuoted ||
node.type === ConcreteNodeTypes.AttributeSingleQuoted ||
node.type === ConcreteNodeTypes.AttributeUnquoted ||
node.type === ConcreteNodeTypes.AttributeEmpty
);
}
function cstToAST(cst: ConcreteNode[] | ConcreteAttributeNode[]) {
if (cst.length === 0) return [];
const astBuilder = new | ASTBuilder(cst[0].source); |
for (let i = 0; i < cst.length; i += 1) {
const node = cst[i];
const prevNode = cst[i - 1];
// Add whitespaces and linebreaks that went missing after parsing. We don't need to do this
// if the node is an attribute since whitespaces between attributes is not important to preserve.
// In fact it would probably break the rendered output due to unexpected text nodes.
// TODO: This should be handled in the grammar/source-to-cst part instead (if possible).
if (prevNode?.source && !isAttributeNode(node)) {
const diff = node.locStart - prevNode.locEnd;
if (diff > 0) {
astBuilder.push(
toTextNode({
type: ConcreteNodeTypes.TextNode,
locStart: prevNode.locEnd,
locEnd: node.locStart,
source: node.source,
value: prevNode.source.slice(prevNode.locEnd, node.locStart),
}),
);
}
}
switch (node.type) {
case ConcreteNodeTypes.TextNode: {
astBuilder.push(toTextNode(node));
break;
}
case ConcreteNodeTypes.LiquidDropNode: {
astBuilder.push(toLiquidDropNode(node));
break;
}
case ConcreteNodeTypes.ElementOpeningTag: {
astBuilder.open(toElementNode(node));
break;
}
case ConcreteNodeTypes.ElementClosingTag: {
astBuilder.close(node, NodeTypes.ElementNode);
break;
}
case ConcreteNodeTypes.ElementSelfClosingTag: {
astBuilder.open(toElementNode(node));
astBuilder.close(node, NodeTypes.ElementNode);
break;
}
case ConcreteNodeTypes.AttributeDoubleQuoted:
case ConcreteNodeTypes.AttributeSingleQuoted:
case ConcreteNodeTypes.AttributeUnquoted: {
const attributeNode: AttributeDoubleQuoted | AttributeSingleQuoted | AttributeUnquoted = {
type: node.type as unknown as
| NodeTypes.AttributeDoubleQuoted
| NodeTypes.AttributeSingleQuoted
| NodeTypes.AttributeUnquoted,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
name: cstToAST([node.name])[0] as TextNode,
value: toAttributeValue(node.value),
};
astBuilder.push(attributeNode);
break;
}
case ConcreteNodeTypes.AttributeEmpty: {
const attributeNode: AttributeEmpty = {
type: NodeTypes.AttributeEmpty,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
name: cstToAST([node.name])[0] as TextNode,
};
astBuilder.push(attributeNode);
break;
}
default: {
throw new UnknownConcreteNodeTypeError(
'',
(node as any)?.source,
(node as any)?.locStart,
(node as any)?.locEnd,
);
}
}
}
return astBuilder.finish();
}
export default function sourceToAST(source: string): LiquidXNode[] {
const cst = sourceToCST(source);
const ast = cstToAST(cst);
return ast;
}
| src/parser/2-cst-to-ast/index.ts | unshopable-liquidx-a101873 | [
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " case NodeTypes.ElementNode: {\n output += renderElement(node, { withSource, isChildOfElementNode });\n break;\n }\n case NodeTypes.AttributeDoubleQuoted:\n case NodeTypes.AttributeSingleQuoted:\n case NodeTypes.AttributeUnquoted: {\n const name = renderText(node.name);\n let value = null;\n if (node.value.type === NodeTypes.TextNode) {",
"score": 0.8244726657867432
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": " | ConcreteAttributeSingleQuoted\n | ConcreteAttributeUnquoted\n | ConcreteAttributeEmpty;\nexport type ConcreteAttributeDoubleQuoted =\n {} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeDoubleQuoted>;\nexport type ConcreteAttributeSingleQuoted =\n {} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeSingleQuoted>;\nexport type ConcreteAttributeUnquoted =\n {} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeUnquoted>;\nexport type ConcreteAttributeEmpty = {",
"score": 0.8133658766746521
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": " attributeName: textNode,\n attributeDoubleQuotedValue: 0,\n attributeSingleQuotedValue: 0,\n attributeUnquotedValue: 0,\n attributeDoubleQuotedTextNode: textNode,\n attributeSingleQuotedTextNode: textNode,\n attributeUnquotedTextNode: textNode,\n };\n const cst = toAST(matchResult, mapping) as ConcreteNode[];\n return cst;",
"score": 0.7972182035446167
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": "export type ConcreteElementSelfClosingTagNode = {\n name: string;\n attributes: ConcreteAttributeNode[];\n} & ConcreteBasicNode<ConcreteNodeTypes.ElementSelfClosingTag>;\nexport type ConcreteAttributeNodeBase<T> = {\n name: ConcreteTextNode;\n value: ConcreteTextNode;\n} & ConcreteBasicNode<T>;\nexport type ConcreteAttributeNode =\n | ConcreteAttributeDoubleQuoted",
"score": 0.7849557399749756
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": " AttributeDoubleQuoted = 'AttributeDoubleQuoted',\n AttributeSingleQuoted = 'AttributeSingleQuoted',\n AttributeUnquoted = 'AttributeUnquoted',\n AttributeEmpty = 'AttributeEmpty',\n}\nexport type ConcreteNode =\n | ConcreteTextNode\n | ConcreteLiquidDropNode\n | ConcreteElementOpeningTagNode\n | ConcreteElementClosingTagNode",
"score": 0.7822086811065674
}
] | typescript | ASTBuilder(cst[0].source); |
import { type IResult, type Request } from 'mssql';
import type { StoredProcedureParameter, StoredProcedureSchema, ILogger } from '../types';
import { type DatabaseExecutor } from '../executor';
import { convertSqlValueToJsValue } from '../utils';
/**
* A manager for stored procedure metadata.
* Handles the retrieval and caching of stored procedure metadata.
*/
export class StoredProcedureMetadataManager {
/**
* Regular expression to extract MSSQL stored procedure names.
* See https://regex101.com/r/cMsTyT/1 for this regex.
*/
private static readonly storedProcedureNameRegex =
/((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))/i;
/**
* Matches any comments from the Stored Procedure definition.
* See https://regex101.com/r/dxA7n0/1 for this regex.
*/
private static readonly commentRegex = /(?:\s*-{2}.+\s*$)|(?:\/\*([\s\S]*?)\*\/)/gm;
/**
* Matches the parameters from the Stored Procedure definition.
* See https://regex101.com/r/4TaTky/1 for this regex.
*/
private static readonly parameterSectionRegex =
/(?<=(?:CREATE|ALTER)\s+PROCEDURE)\s+((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))(.*?)(?=(?:AS|FOR\s+REPLICATION)[^\w])/is;
/**
* See https://regex101.com/r/iMEaLb/1 for this regex.
* Match the individual parameters in the Parameter Definition.
*/
private static readonly parameterDefinitionRegex = /(@[\w]+)\s+([^\s]+)\s*=\s*([^, ]*),?/gi;
constructor(private readonly _databaseExecutor: DatabaseExecutor) {}
/**
* Parses the stored procedure parameter schema into a StoredProcedureParameter array.
* @param {string} storedProcedureName - The name of the stored procedure to retrieve the parameter schema for.
* @returns A Promise that resolves to the result of the stored procedure execution.
*/
public async getStoredProcedureParameterSchema(
storedProcedureName: string,
logger: ILogger,
): Promise<IResult<StoredProcedureSchema>> {
return await this._databaseExecutor.executeQueryRequest(async (request: Request) => {
// Remove square bracket notation if any, and split into schema and name.
const schemaAndName = storedProcedureName.replace(/\[|\]/g, '').split('.');
const result = await request.query<StoredProcedureSchema>(
'SELECT ' +
'PARAMETER_NAME as name, ' +
'DATA_TYPE as type, ' +
'PARAMETER_MODE as mode, ' +
'CHARACTER_MAXIMUM_LENGTH length, ' +
'NUMERIC_PRECISION as precision, ' +
'NUMERIC_SCALE as scale ' +
'FROM INFORMATION_SCHEMA.PARAMETERS ' +
`WHERE SPECIFIC_SCHEMA = '${schemaAndName[0]}' AND SPECIFIC_NAME = '${schemaAndName[1]}';
SELECT OBJECT_DEFINITION(OBJECT_ID('${storedProcedureName}')) AS storedProcedureDefinition;`,
);
const recordSetLength = result.recordsets.length as number;
if (recordSetLength < 1 || recordSetLength > 2) {
throw new Error(
`Could not retrieve stored procedure parameter schema from Database for stored procedure ${storedProcedureName}.`,
);
}
if (recordSetLength !== 2 || result.recordsets[1].length !== 1) {
throw new Error(
`Could not retrieve stored procedure definition from Database for stored procedure ${storedProcedureName}.`,
);
}
return result;
}, logger);
}
/**
* Parses the stored procedure parameter schema into a StoredProcedureParameter array.
* @param {string} storedProcedureName - The name of the stored procedure to parse the parameter schema for.
* @param {IResult<StoredProcedureSchema>} schemaResult - The result of the stored procedure parameter schema query.
* @returns A StoredProcedureParameter array.
*/
public parseStoredProcedureParameters(
storedProcedureName: string,
schemaResult: IResult<StoredProcedureSchema>,
): IterableIterator<StoredProcedureParameter> {
const parameterSchemaMap: Map<string, StoredProcedureParameter> =
schemaResult.recordsets[0].reduce(
(parameterMap: Map<string, StoredProcedureParameter>, item: StoredProcedureParameter) => {
parameterMap.set(item.name, item);
return parameterMap;
},
new Map<string, StoredProcedureParameter>(),
);
const storedProcedureDefinition = schemaResult.recordsets[1][0].storedProcedureDefinition;
if (storedProcedureDefinition == null) {
throw new Error(
`Could not parse stored procedure definition for stored procedure ${storedProcedureName}.`,
);
}
const commentStrippedStoredProcedureDefinition = storedProcedureDefinition.replace(
StoredProcedureMetadataManager.commentRegex,
'',
);
if (commentStrippedStoredProcedureDefinition === '') {
throw new Error(
`Could not parse stored procedure comments from definition for stored procedure ${storedProcedureName}.`,
);
}
const parameterSection = commentStrippedStoredProcedureDefinition.match(
StoredProcedureMetadataManager.parameterSectionRegex,
);
if (parameterSection === null || parameterSection.length !== 9) {
throw new Error(
`Could not parse stored procedure parameters from definition for stored procedure ${storedProcedureName}.`,
);
}
const parameterDefinition = parameterSection[8];
let parameterDefinitionMatch;
while (
(parameterDefinitionMatch =
StoredProcedureMetadataManager.parameterDefinitionRegex.exec(parameterDefinition)) !== null
) {
const name = parameterDefinitionMatch[1];
const type = parameterDefinitionMatch[2];
const defaultValue = parameterDefinitionMatch[3];
const parameter = parameterSchemaMap.get(name);
if (parameter !== undefined) {
parameter. | defaultValue = convertSqlValueToJsValue(defaultValue, type); |
}
}
return parameterSchemaMap.values();
}
}
| src/lib/stored-procedure/stored-procedure-metadata-manager.ts | Falven-mssql-data-source-bca6621 | [
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " if (preparedParameter != null) {\n preparedParameter.value = inputParameters[inputParameterKey];\n }\n // We don't care about provided input parameters that are missing in the Stored Procedure definition.\n }\n return preparedParameters;\n }\n private getMissingRequiredParameters(\n parameters: Map<string, PreparedStoredProcedureParameter>,\n ): PreparedStoredProcedureParameter[] {",
"score": 0.8206737637519836
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " for (const spParameter of storedProcedureParameters) {\n const { name, type, length, precision, scale, ...rest } = spParameter;\n const parameterName = name.slice(1);\n // Let's use the parameter name in lowercase as the lookup key.\n preparedParameters.set(parameterName.toLowerCase(), {\n name: parameterName,\n type: mapDbTypeToDriverType({\n type,\n length,\n precision,",
"score": 0.81842440366745
},
{
"filename": "src/lib/utils/type-map.ts",
"retrieved_chunk": "}: Pick<StoredProcedureParameter, 'type' | 'length' | 'precision' | 'scale'>): ISqlTypeFactory => {\n const types: IndexableTypes = TYPES;\n const property = findPropertyCaseInsensitive(types, type);\n if (property !== null) {\n const typeFactory = types[property as TypesKey];\n if (isSqlTypeFactoryWithNoParams(typeFactory)) {\n return typeFactory();\n } else if (isSqlTypeFactoryWithLength(typeFactory)) {\n return (typeFactory as ISqlTypeFactoryWithLength)(length === -1 ? MAX : length);\n } else if (isSqlTypeFactoryWithScale(typeFactory)) {",
"score": 0.8181779980659485
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " private addParametersToRequest(\n parameters: Map<string, PreparedStoredProcedureParameter>,\n request: Request,\n ): Request {\n const preparedRequest = request;\n for (const parameter of parameters.values()) {\n const { name, type, mode, value, defaultValue } = parameter;\n if (defaultValue !== undefined && value === undefined) {\n continue;\n }",
"score": 0.8152432441711426
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " // Check what required parameters are missing.\n const missingRequiredParameters = [];\n for (const parameter of parameters.values()) {\n // If they have a default value they can be ommitted from the request.\n if (parameter.defaultValue === undefined && parameter.value === undefined) {\n missingRequiredParameters.push(parameter);\n }\n }\n return missingRequiredParameters;\n }",
"score": 0.8088726997375488
}
] | typescript | defaultValue = convertSqlValueToJsValue(defaultValue, type); |
import { camelCase } from 'lodash';
import { type Request, type IProcedureResult, type IResult, type IRecordSet } from 'mssql';
import { type GraphQLResolveInfo } from 'graphql';
import {
type DriverType,
type PreparedStoredProcedureParameter,
ParameterMode,
type StoredProcedureSchema,
type StoredProcedureParameter,
type ILogger,
type InputParameters,
} from '../types';
import { mapDbTypeToDriverType, replacer } from '../utils';
import { logExecutionBegin, logPerformance, logSafely } from '../logging';
import {
type StoredProcedureCacheManager,
type StoredProcedureMetadataManager,
} from '../stored-procedure';
import { type IResolverProcedureResult } from '../types/i-resolver-procedure-result';
import { getNodeSelectionSetNames, getFieldNamesExcludingNode } from '../utils/graphql-helper';
/**
* StoredProcedureManager provides methods to interact
* with a Microsoft SQL Server database for managing stored procedures.
*/
export class StoredProcedureManager {
/**
* Creates a new instance of StoredProcedureManager.
*/
constructor(
private readonly _storedProcedureCacheManager: StoredProcedureCacheManager,
private readonly _storedProcedureMetadataManager: StoredProcedureMetadataManager,
) {}
/**
* Executes a stored procedure with the provided input parameters, and returns the result.
* @template TVal - The type of records in the result set.
* @template TRet - The type of the result object to be returned.
* @param {string} storedProcedureName - The name of the stored procedure to execute.
* @param {StoredProcedureInput} input - The input parameters for the stored procedure.
* @param {Request} request - The request to execute the stored procedure.
* @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored
* procedure results to the correct schema field names.
* @param {ILogger} logger - The logger to use for logging.
* @returns A Promise that resolves to the result of the stored procedure execution.
*/
public async executeStoredProcedure<T>(
storedProcedureName: string,
input: InputParameters,
request: Request,
logger: ILogger,
info?: GraphQLResolveInfo,
): Promise<IResolverProcedureResult<T>> {
let startTime = performance.now();
let schema = (await this._storedProcedureCacheManager.tryGetFromCache(storedProcedureName)) as
| IResult<StoredProcedureSchema>
| undefined;
if (schema === undefined) {
logSafely(
logger,
'info',
// Yellow
`\x1b[33mCache miss occurred while retrieving the cached schema for ${storedProcedureName}\x1b[0m`,
);
schema = await this._storedProcedureMetadataManager.getStoredProcedureParameterSchema(
storedProcedureName,
logger,
);
await | this._storedProcedureCacheManager.addToCache(storedProcedureName, schema); |
} else {
logSafely(
logger,
'info',
// Green
`\x1b[32mCache hit occurred while retrieving the cached schema for ${storedProcedureName}\x1b[0m`,
);
}
logPerformance(logger, 'getStoredProcedureParameterSchema', startTime);
startTime = performance.now();
const storedProcedureParameters =
this._storedProcedureMetadataManager.parseStoredProcedureParameters(
storedProcedureName,
schema,
);
logPerformance(logger, 'parseStoredProcedureParameters', startTime);
startTime = performance.now();
const preparedRequest = this.prepareStoredProcedureRequest(
storedProcedureParameters,
input,
request,
);
logPerformance(logger, 'prepareStoredProcedureRequest', startTime);
startTime = performance.now();
logExecutionBegin(
logger,
`Stored Procedure ${storedProcedureName} with parameters`,
preparedRequest.parameters,
// Green
'32m',
);
const result = await preparedRequest.execute(storedProcedureName);
startTime = performance.now();
const preparedResult = this.prepareStoredProcedureResult(result, info);
logPerformance(logger, 'prepareStoredProcedureResult', startTime);
return preparedResult;
}
private prepareParameters(
storedProcedureParameters: IterableIterator<StoredProcedureParameter>,
input: InputParameters,
): Map<string, PreparedStoredProcedureParameter> {
// We want to use the inferred DB Stored Procedure schema as the source of truth.
const preparedParameters = new Map<string, PreparedStoredProcedureParameter>();
for (const spParameter of storedProcedureParameters) {
const { name, type, length, precision, scale, ...rest } = spParameter;
const parameterName = name.slice(1);
// Let's use the parameter name in lowercase as the lookup key.
preparedParameters.set(parameterName.toLowerCase(), {
name: parameterName,
type: mapDbTypeToDriverType({
type,
length,
precision,
scale,
}) as DriverType,
value: undefined,
...rest,
});
}
// Populate our input values into the request parameters.
const inputParameters = input as Record<string, unknown>;
for (const inputParameterKey in inputParameters) {
const preparedParameter = preparedParameters.get(inputParameterKey.toLowerCase());
if (preparedParameter != null) {
preparedParameter.value = inputParameters[inputParameterKey];
}
// We don't care about provided input parameters that are missing in the Stored Procedure definition.
}
return preparedParameters;
}
private getMissingRequiredParameters(
parameters: Map<string, PreparedStoredProcedureParameter>,
): PreparedStoredProcedureParameter[] {
// Check what required parameters are missing.
const missingRequiredParameters = [];
for (const parameter of parameters.values()) {
// If they have a default value they can be ommitted from the request.
if (parameter.defaultValue === undefined && parameter.value === undefined) {
missingRequiredParameters.push(parameter);
}
}
return missingRequiredParameters;
}
private addParametersToRequest(
parameters: Map<string, PreparedStoredProcedureParameter>,
request: Request,
): Request {
const preparedRequest = request;
for (const parameter of parameters.values()) {
const { name, type, mode, value, defaultValue } = parameter;
if (defaultValue !== undefined && value === undefined) {
continue;
}
const modeEnum = mode;
if (modeEnum === ParameterMode.IN) {
preparedRequest.input(name, type, value);
} else if (modeEnum === ParameterMode.INOUT) {
preparedRequest.output(name, type, value);
} else {
throw new Error(`Unknown parameter mode: ${mode}`);
}
}
return preparedRequest;
}
/**
* Prepares the stored procedure request.
* @param {IterableIterator<StoredProcedureParameter>} storedProcedureParameters - The stored procedure parameters.
* @param {StoredProcedureInput} input - The input object.
* @param {Request} request - The request object.
* @returns A prepared request object.
*/
private prepareStoredProcedureRequest(
storedProcedureParameters: IterableIterator<StoredProcedureParameter>,
input: InputParameters,
request: Request,
): Request {
const parameters = this.prepareParameters(storedProcedureParameters, input);
const missingRequiredParameters = this.getMissingRequiredParameters(parameters);
const missingLength = missingRequiredParameters.length;
if (missingLength > 0) {
throw new Error(
`Missing ${missingLength} required parameters: ${missingRequiredParameters
.map((param) => JSON.stringify(param, replacer, 0))
.join(', ')}.`,
);
}
const preparedRequest = this.addParametersToRequest(parameters, request);
return preparedRequest;
}
/**
* Maps the keys of an object based on the provided mapping.
* @template T - The type of the original object.
* @param {T} obj - The object whose keys need to be mapped.
* @param {Record<string, string>} mapping - A dictionary containing the mapping of the original keys to the new keys.
* @returns {T} A new object with the keys mapped according to the provided mapping.
*/
private mapKeysWithMapping<T extends Record<string, unknown>>(
obj: T,
mapping: Record<string, string>,
): T {
const result: Record<string, unknown> = {};
for (const key in obj) {
const mappedKey = mapping[key.toLowerCase()] ?? camelCase(key);
result[mappedKey] = obj[key];
}
return result as T;
}
/**
* Prepares the stored procedure result into a GraphQL result object.
* @param {IProcedureResult} result - The stored procedure result.
* @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored
* procedure results to the correct schema field names.
* @returns {IResolverProcedureResult} A prepared GraphQL result object.
*/
private prepareStoredProcedureResult<T extends Record<string, unknown>>(
result: IProcedureResult<T>,
info?: GraphQLResolveInfo,
): IResolverProcedureResult<T> {
const { resultSetFields, outputFields } =
info !== undefined
? {
resultSetFields: getNodeSelectionSetNames(info, 'resultSets'),
outputFields: getFieldNamesExcludingNode(info, 'resultSets'),
}
: { resultSetFields: {}, outputFields: {} };
const resultSets = result.recordsets.map((recordset: IRecordSet<Record<string, unknown>>) => {
return recordset.map((record: Record<string, unknown>) =>
this.mapKeysWithMapping(record, resultSetFields),
);
});
const output = this.mapKeysWithMapping(result.output, outputFields);
const preparedResult = {
returnValue: result.returnValue,
resultSets: resultSets as T[][],
rowsAffected: result.rowsAffected,
...output,
};
return preparedResult;
}
}
| src/lib/stored-procedure/stored-procedure-manager.ts | Falven-mssql-data-source-bca6621 | [
{
"filename": "src/lib/datasource/mssql-datasource.ts",
"retrieved_chunk": " this._databaseExecutor,\n );\n this._storedProcedureCacheManager = new StoredProcedureCacheManager();\n this._storedProcedureManager = new StoredProcedureManager(\n this._storedProcedureCacheManager,\n this._storedProcedureMetadataManager,\n );\n }\n /**\n * Executes a stored procedure for a Query operation with the provided input parameters, and returns the result.",
"score": 0.7844321727752686
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " `Could not parse stored procedure comments from definition for stored procedure ${storedProcedureName}.`,\n );\n }\n const parameterSection = commentStrippedStoredProcedureDefinition.match(\n StoredProcedureMetadataManager.parameterSectionRegex,\n );\n if (parameterSection === null || parameterSection.length !== 9) {\n throw new Error(\n `Could not parse stored procedure parameters from definition for stored procedure ${storedProcedureName}.`,\n );",
"score": 0.7802853584289551
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " );\n }\n if (recordSetLength !== 2 || result.recordsets[1].length !== 1) {\n throw new Error(\n `Could not retrieve stored procedure definition from Database for stored procedure ${storedProcedureName}.`,\n );\n }\n return result;\n }, logger);\n }",
"score": 0.7710242867469788
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " throw new Error(\n `Could not parse stored procedure definition for stored procedure ${storedProcedureName}.`,\n );\n }\n const commentStrippedStoredProcedureDefinition = storedProcedureDefinition.replace(\n StoredProcedureMetadataManager.commentRegex,\n '',\n );\n if (commentStrippedStoredProcedureDefinition === '') {\n throw new Error(",
"score": 0.766816258430481
},
{
"filename": "src/lib/datasource/mssql-datasource.ts",
"retrieved_chunk": " info?: GraphQLResolveInfo,\n ): Promise<IResolverProcedureResult<T>> {\n const startTime = performance.now();\n const logger = this._queryLogger;\n logExecutionBegin(logger, `Stored Procedure Query ${storedProcedureName} with inputs`, input);\n const result = await this._databaseExecutor.executeQueryRequest(\n async (request: Request): Promise<IResolverProcedureResult<T>> =>\n await this._storedProcedureManager.executeStoredProcedure<T>(\n storedProcedureName,\n input,",
"score": 0.7642738223075867
}
] | typescript | this._storedProcedureCacheManager.addToCache(storedProcedureName, schema); |
import sourceToCST, {
ConcreteAttributeNode,
ConcreteElementOpeningTagNode,
ConcreteElementSelfClosingTagNode,
ConcreteLiquidDropNode,
ConcreteNode,
ConcreteNodeTypes,
ConcreteTextNode,
} from '../1-source-to-cst';
import { UnknownConcreteNodeTypeError } from '../errors';
import ASTBuilder from './ast-builder';
/** Common location/source fields shared by every AST node type. */
export type BasicNode<T> = {
  type: T;
  // Start offset of the node within the original source string.
  locStart: number;
  // End offset (exclusive) of the node within the original source string.
  locEnd: number;
  // The original source text the offsets refer to.
  source: string;
};

/** Discriminant values for every AST node produced by this module. */
export enum NodeTypes {
  TextNode = 'TextNode',
  LiquidDropNode = 'LiquidDropNode',
  ElementNode = 'ElementNode',
  AttributeDoubleQuoted = 'AttributeDoubleQuoted',
  AttributeSingleQuoted = 'AttributeSingleQuoted',
  AttributeUnquoted = 'AttributeUnquoted',
  AttributeEmpty = 'AttributeEmpty',
}

/** Plain text content. */
export type TextNode = {
  value: string;
} & BasicNode<NodeTypes.TextNode>;

/** A Liquid drop; `value` holds the drop's inner text. */
export type LiquidDropNode = {
  value: string;
} & BasicNode<NodeTypes.LiquidDropNode>;

/** Union of all AST node kinds this parser stage emits. */
export type LiquidXNode = TextNode | LiquidDropNode | ElementNode | AttributeNode;

/** An element together with its attributes and (possibly empty) children. */
export type ElementNode = {
  name: string;
  source: string;
  attributes: AttributeNode[];
  children: LiquidXNode[];
} & BasicNode<NodeTypes.ElementNode>;

/** Union of the four supported attribute syntaxes. */
export type AttributeNode =
  | AttributeDoubleQuoted
  | AttributeSingleQuoted
  | AttributeUnquoted
  | AttributeEmpty;

/** Shared shape of attributes that carry a value. */
export type AttributeNodeBase<T> = {
  name: TextNode;
  value: TextNode | LiquidDropNode;
} & BasicNode<T>;

export type AttributeDoubleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeDoubleQuoted>;
export type AttributeSingleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeSingleQuoted>;
export type AttributeUnquoted = {} & AttributeNodeBase<NodeTypes.AttributeUnquoted>;
/** An attribute with a name but no value (e.g. `disabled`). */
export type AttributeEmpty = { name: TextNode } & BasicNode<NodeTypes.AttributeEmpty>;
/** Converts a concrete text node into its AST counterpart. */
function toTextNode(node: ConcreteTextNode): TextNode {
  const { locStart, locEnd, source, value } = node;
  return { type: NodeTypes.TextNode, locStart, locEnd, source, value };
}
/** Converts a concrete Liquid drop node into its AST counterpart. */
function toLiquidDropNode(node: ConcreteLiquidDropNode): LiquidDropNode {
  const { locStart, locEnd, source, value } = node;
  return { type: NodeTypes.LiquidDropNode, locStart, locEnd, source, value };
}
/**
 * Converts a concrete opening or self-closing tag into an ElementNode.
 * Children start out empty; the AST builder fills them in later.
 */
function toElementNode(
  node: ConcreteElementOpeningTagNode | ConcreteElementSelfClosingTagNode,
): ElementNode {
  const { locStart, locEnd, name, source } = node;
  return {
    type: NodeTypes.ElementNode,
    locStart,
    locEnd,
    name,
    source,
    attributes: toAttributes(node.attributes),
    children: [],
  };
}
/** Converts a list of concrete attribute nodes into AST attribute nodes. */
function toAttributes(attributes: ConcreteAttributeNode[]) {
  const astNodes = cstToAST(attributes);
  return astNodes as AttributeNode[];
}
/** Converts a single concrete attribute value into its AST counterpart. */
function toAttributeValue(value: ConcreteTextNode | ConcreteLiquidDropNode) {
  const [astValue] = cstToAST([value]);
  return astValue as TextNode | LiquidDropNode;
}
function isAttributeNode(node: any): boolean {
return (
node.type === ConcreteNodeTypes.AttributeDoubleQuoted ||
node.type === ConcreteNodeTypes.AttributeSingleQuoted ||
node.type === ConcreteNodeTypes.AttributeUnquoted ||
node.type === ConcreteNodeTypes.AttributeEmpty
);
}
/**
 * Transforms a CST (or a list of concrete attribute nodes) into an AST.
 *
 * Uses an ASTBuilder to track element nesting: opening tags push a new
 * element scope, closing tags pop it, and every other node is appended to
 * the current scope.
 *
 * @throws UnknownConcreteNodeTypeError for CST node types not handled here.
 */
function cstToAST(cst: ConcreteNode[] | ConcreteAttributeNode[]) {
  if (cst.length === 0) return [];
  const astBuilder = new ASTBuilder(cst[0].source);
  for (let i = 0; i < cst.length; i += 1) {
    const node = cst[i];
    const prevNode = cst[i - 1];
    // Add whitespaces and linebreaks that went missing after parsing. We don't need to do this
    // if the node is an attribute since whitespaces between attributes is not important to preserve.
    // In fact it would probably break the rendered output due to unexpected text nodes.
    // TODO: This should be handled in the grammar/source-to-cst part instead (if possible).
    if (prevNode?.source && !isAttributeNode(node)) {
      const diff = node.locStart - prevNode.locEnd;
      if (diff > 0) {
        astBuilder.push(
          toTextNode({
            type: ConcreteNodeTypes.TextNode,
            locStart: prevNode.locEnd,
            locEnd: node.locStart,
            source: node.source,
            value: prevNode.source.slice(prevNode.locEnd, node.locStart),
          }),
        );
      }
    }
    switch (node.type) {
      case ConcreteNodeTypes.TextNode: {
        astBuilder.push(toTextNode(node));
        break;
      }
      case ConcreteNodeTypes.LiquidDropNode: {
        astBuilder.push(toLiquidDropNode(node));
        break;
      }
      // An opening tag starts a new element scope; subsequent nodes become
      // its children until the matching closing tag is seen.
      case ConcreteNodeTypes.ElementOpeningTag: {
        astBuilder.open(toElementNode(node));
        break;
      }
      case ConcreteNodeTypes.ElementClosingTag: {
        astBuilder.close(node, NodeTypes.ElementNode);
        break;
      }
      // A self-closing tag opens and immediately closes its scope, so the
      // resulting element never receives children.
      case ConcreteNodeTypes.ElementSelfClosingTag: {
        astBuilder.open(toElementNode(node));
        astBuilder.close(node, NodeTypes.ElementNode);
        break;
      }
      case ConcreteNodeTypes.AttributeDoubleQuoted:
      case ConcreteNodeTypes.AttributeSingleQuoted:
      case ConcreteNodeTypes.AttributeUnquoted: {
        const attributeNode: AttributeDoubleQuoted | AttributeSingleQuoted | AttributeUnquoted = {
          // Concrete and AST attribute type names match 1:1, hence the cast.
          type: node.type as unknown as
            | NodeTypes.AttributeDoubleQuoted
            | NodeTypes.AttributeSingleQuoted
            | NodeTypes.AttributeUnquoted,
          locStart: node.locStart,
          locEnd: node.locEnd,
          source: node.source,
          name: cstToAST([node.name])[0] as TextNode,
          value: toAttributeValue(node.value),
        };
        astBuilder.push(attributeNode);
        break;
      }
      case ConcreteNodeTypes.AttributeEmpty: {
        const attributeNode: AttributeEmpty = {
          type: NodeTypes.AttributeEmpty,
          locStart: node.locStart,
          locEnd: node.locEnd,
          source: node.source,
          name: cstToAST([node.name])[0] as TextNode,
        };
        astBuilder.push(attributeNode);
        break;
      }
      default: {
        throw new UnknownConcreteNodeTypeError(
          '',
          (node as any)?.source,
          (node as any)?.locStart,
          (node as any)?.locEnd,
        );
      }
    }
  }
  return astBuilder.finish();
}
/** Parses LiquidX source into an AST (source → CST → AST). */
export default function sourceToAST(source: string): LiquidXNode[] {
  return cstToAST(sourceToCST(source));
}
| src/parser/2-cst-to-ast/index.ts | unshopable-liquidx-a101873 | [
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": "export type ConcreteLiquidDropNode = {\n value: string;\n} & ConcreteBasicNode<ConcreteNodeTypes.LiquidDropNode>;\nexport type ConcreteElementOpeningTagNode = {\n name: string;\n attributes: ConcreteAttributeNode[];\n} & ConcreteBasicNode<ConcreteNodeTypes.ElementOpeningTag>;\nexport type ConcreteElementClosingTagNode = {\n name: string;\n} & ConcreteBasicNode<ConcreteNodeTypes.ElementClosingTag>;",
"score": 0.7858116626739502
},
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " case NodeTypes.ElementNode: {\n output += renderElement(node, { withSource, isChildOfElementNode });\n break;\n }\n case NodeTypes.AttributeDoubleQuoted:\n case NodeTypes.AttributeSingleQuoted:\n case NodeTypes.AttributeUnquoted: {\n const name = renderText(node.name);\n let value = null;\n if (node.value.type === NodeTypes.TextNode) {",
"score": 0.7847388982772827
},
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " return output;\n}\nfunction renderText(node: TextNode) {\n return node.value;\n}\nfunction renderLiquidDrop(node: LiquidDropNode) {\n return node.value;\n}\nfunction renderAST(\n ast: LiquidXNode[],",
"score": 0.7740079164505005
},
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " value = JSON.stringify(renderText(node.value));\n } else {\n value = renderLiquidDrop(node.value);\n }\n output += `${name}: ${value}`;\n break;\n }\n case NodeTypes.AttributeEmpty: {\n const name = renderText(node.name);\n const value = true;",
"score": 0.7688074707984924
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": " return this.sourceString;\n },\n source,\n };\n const mapping: Mapping = {\n Node: 0,\n TextNode: textNode,\n liquidDropNode: {\n type: ConcreteNodeTypes.LiquidDropNode,\n locStart,",
"score": 0.7617778778076172
}
] | typescript | open(toElementNode(node)); |
import sourceToCST, {
ConcreteAttributeNode,
ConcreteElementOpeningTagNode,
ConcreteElementSelfClosingTagNode,
ConcreteLiquidDropNode,
ConcreteNode,
ConcreteNodeTypes,
ConcreteTextNode,
} from '../1-source-to-cst';
import { UnknownConcreteNodeTypeError } from '../errors';
import ASTBuilder from './ast-builder';
/** Common location/source fields shared by every AST node type. */
export type BasicNode<T> = {
  type: T;
  // Start offset of the node within the original source string.
  locStart: number;
  // End offset (exclusive) of the node within the original source string.
  locEnd: number;
  // The original source text the offsets refer to.
  source: string;
};

/** Discriminant values for every AST node produced by this module. */
export enum NodeTypes {
  TextNode = 'TextNode',
  LiquidDropNode = 'LiquidDropNode',
  ElementNode = 'ElementNode',
  AttributeDoubleQuoted = 'AttributeDoubleQuoted',
  AttributeSingleQuoted = 'AttributeSingleQuoted',
  AttributeUnquoted = 'AttributeUnquoted',
  AttributeEmpty = 'AttributeEmpty',
}

/** Plain text content. */
export type TextNode = {
  value: string;
} & BasicNode<NodeTypes.TextNode>;

/** A Liquid drop; `value` holds the drop's inner text. */
export type LiquidDropNode = {
  value: string;
} & BasicNode<NodeTypes.LiquidDropNode>;

/** Union of all AST node kinds this parser stage emits. */
export type LiquidXNode = TextNode | LiquidDropNode | ElementNode | AttributeNode;

/** An element together with its attributes and (possibly empty) children. */
export type ElementNode = {
  name: string;
  source: string;
  attributes: AttributeNode[];
  children: LiquidXNode[];
} & BasicNode<NodeTypes.ElementNode>;

/** Union of the four supported attribute syntaxes. */
export type AttributeNode =
  | AttributeDoubleQuoted
  | AttributeSingleQuoted
  | AttributeUnquoted
  | AttributeEmpty;

/** Shared shape of attributes that carry a value. */
export type AttributeNodeBase<T> = {
  name: TextNode;
  value: TextNode | LiquidDropNode;
} & BasicNode<T>;

export type AttributeDoubleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeDoubleQuoted>;
export type AttributeSingleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeSingleQuoted>;
export type AttributeUnquoted = {} & AttributeNodeBase<NodeTypes.AttributeUnquoted>;
/** An attribute with a name but no value (e.g. `disabled`). */
export type AttributeEmpty = { name: TextNode } & BasicNode<NodeTypes.AttributeEmpty>;
/** Converts a concrete text node into its AST counterpart. */
function toTextNode(node: ConcreteTextNode): TextNode {
  const { locStart, locEnd, source, value } = node;
  return { type: NodeTypes.TextNode, locStart, locEnd, source, value };
}
/** Converts a concrete Liquid drop node into its AST counterpart. */
function toLiquidDropNode(node: ConcreteLiquidDropNode): LiquidDropNode {
  const { locStart, locEnd, source, value } = node;
  return { type: NodeTypes.LiquidDropNode, locStart, locEnd, source, value };
}
/**
 * Converts a concrete opening or self-closing tag into an ElementNode.
 * Children start out empty; the AST builder fills them in later.
 */
function toElementNode(
  node: ConcreteElementOpeningTagNode | ConcreteElementSelfClosingTagNode,
): ElementNode {
  const { locStart, locEnd, name, source } = node;
  return {
    type: NodeTypes.ElementNode,
    locStart,
    locEnd,
    name,
    source,
    attributes: toAttributes(node.attributes),
    children: [],
  };
}
/** Converts a list of concrete attribute nodes into AST attribute nodes. */
function toAttributes(attributes: ConcreteAttributeNode[]) {
  const astNodes = cstToAST(attributes);
  return astNodes as AttributeNode[];
}
/** Converts a single concrete attribute value into its AST counterpart. */
function toAttributeValue(value: ConcreteTextNode | ConcreteLiquidDropNode) {
  const [astValue] = cstToAST([value]);
  return astValue as TextNode | LiquidDropNode;
}
function isAttributeNode(node: any): boolean {
return (
node.type === ConcreteNodeTypes.AttributeDoubleQuoted ||
node.type === ConcreteNodeTypes.AttributeSingleQuoted ||
node.type === ConcreteNodeTypes.AttributeUnquoted ||
node.type === ConcreteNodeTypes.AttributeEmpty
);
}
/**
 * Transforms a CST (or a list of concrete attribute nodes) into an AST.
 *
 * Uses an ASTBuilder to track element nesting: opening tags push a new
 * element scope, closing tags pop it, and every other node is appended to
 * the current scope.
 *
 * @throws UnknownConcreteNodeTypeError for CST node types not handled here.
 */
function cstToAST(cst: ConcreteNode[] | ConcreteAttributeNode[]) {
  if (cst.length === 0) return [];
  const astBuilder = new ASTBuilder(cst[0].source);
  for (let i = 0; i < cst.length; i += 1) {
    const node = cst[i];
    const prevNode = cst[i - 1];
    // Add whitespaces and linebreaks that went missing after parsing. We don't need to do this
    // if the node is an attribute since whitespaces between attributes is not important to preserve.
    // In fact it would probably break the rendered output due to unexpected text nodes.
    // TODO: This should be handled in the grammar/source-to-cst part instead (if possible).
    if (prevNode?.source && !isAttributeNode(node)) {
      const diff = node.locStart - prevNode.locEnd;
      if (diff > 0) {
        astBuilder.push(
          toTextNode({
            type: ConcreteNodeTypes.TextNode,
            locStart: prevNode.locEnd,
            locEnd: node.locStart,
            source: node.source,
            value: prevNode.source.slice(prevNode.locEnd, node.locStart),
          }),
        );
      }
    }
    switch (node.type) {
      case ConcreteNodeTypes.TextNode: {
        astBuilder.push(toTextNode(node));
        break;
      }
      case ConcreteNodeTypes.LiquidDropNode: {
        astBuilder.push(toLiquidDropNode(node));
        break;
      }
      // An opening tag starts a new element scope; subsequent nodes become
      // its children until the matching closing tag is seen.
      case ConcreteNodeTypes.ElementOpeningTag: {
        astBuilder.open(toElementNode(node));
        break;
      }
      case ConcreteNodeTypes.ElementClosingTag: {
        astBuilder.close(node, NodeTypes.ElementNode);
        break;
      }
      // A self-closing tag opens and immediately closes its scope, so the
      // resulting element never receives children.
      case ConcreteNodeTypes.ElementSelfClosingTag: {
        astBuilder.open(toElementNode(node));
        astBuilder.close(node, NodeTypes.ElementNode);
        break;
      }
      case ConcreteNodeTypes.AttributeDoubleQuoted:
      case ConcreteNodeTypes.AttributeSingleQuoted:
      case ConcreteNodeTypes.AttributeUnquoted: {
        const attributeNode: AttributeDoubleQuoted | AttributeSingleQuoted | AttributeUnquoted = {
          // Concrete and AST attribute type names match 1:1, hence the cast.
          type: node.type as unknown as
            | NodeTypes.AttributeDoubleQuoted
            | NodeTypes.AttributeSingleQuoted
            | NodeTypes.AttributeUnquoted,
          locStart: node.locStart,
          locEnd: node.locEnd,
          source: node.source,
          name: cstToAST([node.name])[0] as TextNode,
          value: toAttributeValue(node.value),
        };
        astBuilder.push(attributeNode);
        break;
      }
      case ConcreteNodeTypes.AttributeEmpty: {
        const attributeNode: AttributeEmpty = {
          type: NodeTypes.AttributeEmpty,
          locStart: node.locStart,
          locEnd: node.locEnd,
          source: node.source,
          name: cstToAST([node.name])[0] as TextNode,
        };
        astBuilder.push(attributeNode);
        break;
      }
      default: {
        throw new UnknownConcreteNodeTypeError(
          '',
          (node as any)?.source,
          (node as any)?.locStart,
          (node as any)?.locEnd,
        );
      }
    }
  }
  return astBuilder.finish();
}
/** Parses LiquidX source into an AST (source → CST → AST). */
export default function sourceToAST(source: string): LiquidXNode[] {
  return cstToAST(sourceToCST(source));
}
| src/parser/2-cst-to-ast/index.ts | unshopable-liquidx-a101873 | [
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " output += `${name}: ${value}`;\n break;\n }\n default: {\n console.log(node);\n // TODO\n throw new Error('');\n }\n }\n }",
"score": 0.7892622947692871
},
{
"filename": "src/parser/errors.ts",
"retrieved_chunk": " this.name = 'UnknownConcreteNodeTypeError';\n }\n}\nexport class ASTParsingError extends LoggableError {\n constructor(message: string, source: string, locStart: number, locEnd: number) {\n super({ result: undefined, message, source, locStart, locEnd });\n this.name = 'ASTParsingError';\n }\n}",
"score": 0.7719416618347168
},
{
"filename": "src/parser/errors.ts",
"retrieved_chunk": " };\n result = codeFrameColumns(source, location, {\n message: message,\n });\n } else {\n result = info.result;\n }\n super(result);\n this.name = 'BaseError';\n }",
"score": 0.7574750185012817
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": " name: 0,\n value: 2,\n },\n AttributeEmpty: {\n type: ConcreteNodeTypes.AttributeEmpty,\n locStart,\n locEnd,\n source,\n name: 0,\n },",
"score": 0.7526693344116211
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": " name: ConcreteTextNode;\n} & ConcreteBasicNode<ConcreteNodeTypes.AttributeEmpty>;\nexport type CST = ConcreteNode[];\nexport type TemplateMapping = {\n type: ConcreteNodeTypes;\n locStart: (node: Node[]) => number;\n locEnd: (node: Node[]) => number;\n source: string;\n [k: string]: string | number | boolean | object | null;\n};",
"score": 0.7510517239570618
}
] | typescript | return astBuilder.finish(); |
import { Node } from 'ohm-js';
import { toAST } from 'ohm-js/extras';
import { CSTParsingError } from '../errors';
import grammar from '../grammar';
/** Discriminant values for every node produced by the CST stage. */
export enum ConcreteNodeTypes {
  TextNode = 'TextNode',
  LiquidDropNode = 'LiquidDropNode',
  ElementOpeningTag = 'ElementOpeningTag',
  ElementClosingTag = 'ElementClosingTag',
  ElementSelfClosingTag = 'ElementSelfClosingTag',
  AttributeDoubleQuoted = 'AttributeDoubleQuoted',
  AttributeSingleQuoted = 'AttributeSingleQuoted',
  AttributeUnquoted = 'AttributeUnquoted',
  AttributeEmpty = 'AttributeEmpty',
}

/** Union of all top-level CST node kinds. */
export type ConcreteNode =
  | ConcreteTextNode
  | ConcreteLiquidDropNode
  | ConcreteElementOpeningTagNode
  | ConcreteElementClosingTagNode
  | ConcreteElementSelfClosingTagNode;

/** Common location/source fields shared by every CST node type. */
export type ConcreteBasicNode<T> = {
  type: T;
  // Start offset of the node within the original source string.
  locStart: number;
  // End offset (exclusive) of the node within the original source string.
  locEnd: number;
  source: string;
};

/** Plain text content. */
export type ConcreteTextNode = {
  value: string;
} & ConcreteBasicNode<ConcreteNodeTypes.TextNode>;

/** A Liquid drop; `value` holds the drop's inner text. */
export type ConcreteLiquidDropNode = {
  value: string;
} & ConcreteBasicNode<ConcreteNodeTypes.LiquidDropNode>;

/** An opening tag (e.g. `<Name attr="x">`). */
export type ConcreteElementOpeningTagNode = {
  name: string;
  attributes: ConcreteAttributeNode[];
} & ConcreteBasicNode<ConcreteNodeTypes.ElementOpeningTag>;

/** A closing tag (e.g. `</Name>`). */
export type ConcreteElementClosingTagNode = {
  name: string;
} & ConcreteBasicNode<ConcreteNodeTypes.ElementClosingTag>;

/** A self-closing tag (e.g. `<Name attr="x" />`). */
export type ConcreteElementSelfClosingTagNode = {
  name: string;
  attributes: ConcreteAttributeNode[];
} & ConcreteBasicNode<ConcreteNodeTypes.ElementSelfClosingTag>;

/** Shared shape of attributes that carry a value. */
export type ConcreteAttributeNodeBase<T> = {
  name: ConcreteTextNode;
  value: ConcreteTextNode;
} & ConcreteBasicNode<T>;

/** Union of the four supported attribute syntaxes. */
export type ConcreteAttributeNode =
  | ConcreteAttributeDoubleQuoted
  | ConcreteAttributeSingleQuoted
  | ConcreteAttributeUnquoted
  | ConcreteAttributeEmpty;

export type ConcreteAttributeDoubleQuoted =
  {} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeDoubleQuoted>;
export type ConcreteAttributeSingleQuoted =
  {} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeSingleQuoted>;
export type ConcreteAttributeUnquoted =
  {} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeUnquoted>;
/** An attribute with a name but no value (e.g. `disabled`). */
export type ConcreteAttributeEmpty = {
  name: ConcreteTextNode;
} & ConcreteBasicNode<ConcreteNodeTypes.AttributeEmpty>;

export type CST = ConcreteNode[];

/**
 * Mapping template consumed by `toAST` from 'ohm-js/extras': a fixed node
 * type plus per-field extraction (a child index, a literal, or a callback).
 */
export type TemplateMapping = {
  type: ConcreteNodeTypes;
  locStart: (node: Node[]) => number;
  locEnd: (node: Node[]) => number;
  source: string;
  [k: string]: string | number | boolean | object | null;
};

export type TopLevelFunctionMapping = (...nodes: Node[]) => any;

/** Full rule-name → mapping table passed to `toAST`. */
export type Mapping = {
  [k: string]: number | TemplateMapping | TopLevelFunctionMapping;
};
/** Start offset of a node sequence: where its first node begins. */
function locStart(nodes: Node[]) {
  const [first] = nodes;
  return first.source.startIdx;
}
/** End offset of a node sequence: where its last node ends. */
function locEnd(nodes: Node[]) {
  const last = nodes[nodes.length - 1];
  return last.source.endIdx;
}
/**
 * Parses LiquidX source into a CST using the Ohm grammar.
 *
 * Builds a rule-name → mapping table and delegates to `toAST` from
 * 'ohm-js/extras', which walks the match result and produces plain
 * ConcreteNode objects.
 *
 * @throws CSTParsingError when the source does not match the grammar.
 */
export default function sourceToCST(source: string): ConcreteNode[] {
  const matchResult = grammar.match(source);
  if (matchResult.failed()) {
    throw new CSTParsingError(matchResult);
  }
  // Shared mapping for all plain-text rules; `value` is the matched text.
  const textNode = {
    type: ConcreteNodeTypes.TextNode,
    locStart,
    locEnd,
    value: function (this: Node) {
      return this.sourceString;
    },
    source,
  };
  // Numbers are child indices within the grammar rule; objects are
  // TemplateMappings; functions compute the field from the matched node.
  const mapping: Mapping = {
    Node: 0,
    TextNode: textNode,
    liquidDropNode: {
      type: ConcreteNodeTypes.LiquidDropNode,
      locStart,
      locEnd,
      source,
      value: 2,
    },
    liquidDropValue: (node: Node) => node.sourceString.trimEnd(),
    ElementNode: 0,
    ElementOpeningTag: {
      type: ConcreteNodeTypes.ElementOpeningTag,
      locStart,
      locEnd,
      name: 1,
      attributes: 2,
      source,
    },
    ElementClosingTag: {
      type: ConcreteNodeTypes.ElementClosingTag,
      locStart,
      locEnd,
      name: 1,
      source,
    },
    ElementSelfClosingTag: {
      type: ConcreteNodeTypes.ElementSelfClosingTag,
      locStart,
      locEnd,
      name: 1,
      attributes: 2,
      source,
    },
    AttributeDoubleQuoted: {
      type: ConcreteNodeTypes.AttributeDoubleQuoted,
      locStart,
      locEnd,
      source,
      name: 0,
      value: 3,
    },
    AttributeSingleQuoted: {
      type: ConcreteNodeTypes.AttributeSingleQuoted,
      locStart,
      locEnd,
      source,
      name: 0,
      value: 3,
    },
    AttributeUnquoted: {
      type: ConcreteNodeTypes.AttributeUnquoted,
      locStart,
      locEnd,
      source,
      name: 0,
      value: 2,
    },
    AttributeEmpty: {
      type: ConcreteNodeTypes.AttributeEmpty,
      locStart,
      locEnd,
      source,
      name: 0,
    },
    attributeName: textNode,
    attributeDoubleQuotedValue: 0,
    attributeSingleQuotedValue: 0,
    attributeUnquotedValue: 0,
    attributeDoubleQuotedTextNode: textNode,
    attributeSingleQuotedTextNode: textNode,
    attributeUnquotedTextNode: textNode,
  };
  const cst = toAST(matchResult, mapping) as ConcreteNode[];
  return cst;
}
| src/parser/1-source-to-cst/index.ts | unshopable-liquidx-a101873 | [
{
"filename": "src/parser/2-cst-to-ast/index.ts",
"retrieved_chunk": " }\n return astBuilder.finish();\n}\nexport default function sourceToAST(source: string): LiquidXNode[] {\n const cst = sourceToCST(source);\n const ast = cstToAST(cst);\n return ast;\n}",
"score": 0.7873352766036987
},
{
"filename": "src/parser/2-cst-to-ast/ast-builder.ts",
"retrieved_chunk": " source: string;\n constructor(source: string) {\n this.ast = [];\n this.cursor = [];\n this.source = source;\n }\n get current(): LiquidXNode[] {\n return deepGet<LiquidXNode[]>(this.cursor, this.ast);\n }\n get currentPosition(): number {",
"score": 0.7757340669631958
},
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": "}\nfunction renderEndMarker(node: ElementNode) {\n return `{% # LIQUIDX:END - SOURCE ${JSON.stringify(\n node.source.slice(node.locStart, node.locEnd),\n )} %}`;\n}\nfunction renderElement(\n node: ElementNode,\n { withSource = false, isChildOfElementNode = false } = {},\n) {",
"score": 0.7728244066238403
},
{
"filename": "src/parser/2-cst-to-ast/__tests__/utils.ts",
"retrieved_chunk": "import sourceToAST from '../';\nexport function expectOutput(input: string) {\n const output = sourceToAST(input);\n return expect(output);\n}\nexport function expectErrorMessage(input: string) {\n let errorMessage = '';\n try {\n sourceToAST(input);\n } catch (error: any) {",
"score": 0.7715568542480469
},
{
"filename": "src/parser/errors.ts",
"retrieved_chunk": " this.name = 'UnknownConcreteNodeTypeError';\n }\n}\nexport class ASTParsingError extends LoggableError {\n constructor(message: string, source: string, locStart: number, locEnd: number) {\n super({ result: undefined, message, source, locStart, locEnd });\n this.name = 'ASTParsingError';\n }\n}",
"score": 0.7682136297225952
}
] | typescript | new CSTParsingError(matchResult); |
import { type IResult, type Request } from 'mssql';
import type { StoredProcedureParameter, StoredProcedureSchema, ILogger } from '../types';
import { type DatabaseExecutor } from '../executor';
import { convertSqlValueToJsValue } from '../utils';
/**
* A manager for stored procedure metadata.
* Handles the retrieval and caching of stored procedure metadata.
*/
export class StoredProcedureMetadataManager {
  /**
   * Regular expression to extract MSSQL stored procedure names.
   * See https://regex101.com/r/cMsTyT/1 for this regex.
   */
  private static readonly storedProcedureNameRegex =
    /((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))/i;

  /**
   * Matches any comments from the Stored Procedure definition.
   * See https://regex101.com/r/dxA7n0/1 for this regex.
   */
  private static readonly commentRegex = /(?:\s*-{2}.+\s*$)|(?:\/\*([\s\S]*?)\*\/)/gm;

  /**
   * Matches the parameters from the Stored Procedure definition.
   * See https://regex101.com/r/4TaTky/1 for this regex.
   */
  private static readonly parameterSectionRegex =
    /(?<=(?:CREATE|ALTER)\s+PROCEDURE)\s+((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))(.*?)(?=(?:AS|FOR\s+REPLICATION)[^\w])/is;

  /**
   * See https://regex101.com/r/iMEaLb/1 for this regex.
   * Match the individual parameters in the Parameter Definition.
   * NOTE(review): this regex is a shared static with the /g flag, so its
   * lastIndex persists across exec() calls and only resets once exec()
   * returns null — confirm calls to parseStoredProcedureParameters are
   * never interleaved.
   */
  private static readonly parameterDefinitionRegex = /(@[\w]+)\s+([^\s]+)\s*=\s*([^, ]*),?/gi;

  // Executor used to run all metadata queries against the database.
  constructor(private readonly _databaseExecutor: DatabaseExecutor) {}

  /**
   * Parses the stored procedure parameter schema into a StoredProcedureParameter array.
   * @param {string} storedProcedureName - The name of the stored procedure to retrieve the parameter schema for.
   * @returns A Promise that resolves to the result of the stored procedure execution.
   */
  public async getStoredProcedureParameterSchema(
    storedProcedureName: string,
    logger: ILogger,
  ): Promise<IResult<StoredProcedureSchema>> {
    return await this._databaseExecutor.executeQueryRequest(async (request: Request) => {
      // Remove square bracket notation if any, and split into schema and name.
      const schemaAndName = storedProcedureName.replace(/\[|\]/g, '').split('.');
      // Batch of two statements: INFORMATION_SCHEMA parameter rows first,
      // then the full procedure definition text.
      const result = await request.query<StoredProcedureSchema>(
        'SELECT ' +
          'PARAMETER_NAME as name, ' +
          'DATA_TYPE as type, ' +
          'PARAMETER_MODE as mode, ' +
          'CHARACTER_MAXIMUM_LENGTH length, ' +
          'NUMERIC_PRECISION as precision, ' +
          'NUMERIC_SCALE as scale ' +
          'FROM INFORMATION_SCHEMA.PARAMETERS ' +
          `WHERE SPECIFIC_SCHEMA = '${schemaAndName[0]}' AND SPECIFIC_NAME = '${schemaAndName[1]}';
        SELECT OBJECT_DEFINITION(OBJECT_ID('${storedProcedureName}')) AS storedProcedureDefinition;`,
      );
      const recordSetLength = result.recordsets.length as number;
      // Expect exactly two record sets (parameters + definition).
      if (recordSetLength < 1 || recordSetLength > 2) {
        throw new Error(
          `Could not retrieve stored procedure parameter schema from Database for stored procedure ${storedProcedureName}.`,
        );
      }
      // The second record set must hold exactly one row: the definition text.
      if (recordSetLength !== 2 || result.recordsets[1].length !== 1) {
        throw new Error(
          `Could not retrieve stored procedure definition from Database for stored procedure ${storedProcedureName}.`,
        );
      }
      return result;
    }, logger);
  }

  /**
   * Parses the stored procedure parameter schema into a StoredProcedureParameter array.
   * @param {string} storedProcedureName - The name of the stored procedure to parse the parameter schema for.
   * @param {IResult<StoredProcedureSchema>} schemaResult - The result of the stored procedure parameter schema query.
   * @returns A StoredProcedureParameter array.
   */
  public parseStoredProcedureParameters(
    storedProcedureName: string,
    schemaResult: IResult<StoredProcedureSchema>,
  ): IterableIterator<StoredProcedureParameter> {
    // Index the schema rows by parameter name so defaults parsed from the
    // definition below can be overlaid onto the matching rows.
    const parameterSchemaMap: Map<string, StoredProcedureParameter> =
      schemaResult.recordsets[0].reduce(
        (parameterMap: Map<string, StoredProcedureParameter>, item: StoredProcedureParameter) => {
          parameterMap.set(item.name, item);
          return parameterMap;
        },
        new Map<string, StoredProcedureParameter>(),
      );
    const storedProcedureDefinition = schemaResult.recordsets[1][0].storedProcedureDefinition;
    if (storedProcedureDefinition == null) {
      throw new Error(
        `Could not parse stored procedure definition for stored procedure ${storedProcedureName}.`,
      );
    }
    // Strip comments so commented-out parameters are not matched below.
    const commentStrippedStoredProcedureDefinition = storedProcedureDefinition.replace(
      StoredProcedureMetadataManager.commentRegex,
      '',
    );
    if (commentStrippedStoredProcedureDefinition === '') {
      throw new Error(
        `Could not parse stored procedure comments from definition for stored procedure ${storedProcedureName}.`,
      );
    }
    const parameterSection = commentStrippedStoredProcedureDefinition.match(
      StoredProcedureMetadataManager.parameterSectionRegex,
    );
    // parameterSectionRegex yields 9 entries: full match + 8 capture groups.
    if (parameterSection === null || parameterSection.length !== 9) {
      throw new Error(
        `Could not parse stored procedure parameters from definition for stored procedure ${storedProcedureName}.`,
      );
    }
    // Group 8 is the raw parameter list between the procedure name and AS.
    const parameterDefinition = parameterSection[8];
    let parameterDefinitionMatch;
    // Overlay default values declared in the definition onto the schema map.
    while (
      (parameterDefinitionMatch =
        StoredProcedureMetadataManager.parameterDefinitionRegex.exec(parameterDefinition)) !== null
    ) {
      const name = parameterDefinitionMatch[1];
      const type = parameterDefinitionMatch[2];
      const defaultValue = parameterDefinitionMatch[3];
      const parameter = parameterSchemaMap.get(name);
      if (parameter !== undefined) {
        parameter.defaultValue = convertSqlValueToJsValue(defaultValue, type);
      }
    }
    return parameterSchemaMap.values();
  }
}
| src/lib/stored-procedure/stored-procedure-metadata-manager.ts | Falven-mssql-data-source-bca6621 | [
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " const preparedResult = this.prepareStoredProcedureResult(result, info);\n logPerformance(logger, 'prepareStoredProcedureResult', startTime);\n return preparedResult;\n }\n private prepareParameters(\n storedProcedureParameters: IterableIterator<StoredProcedureParameter>,\n input: InputParameters,\n ): Map<string, PreparedStoredProcedureParameter> {\n // We want to use the inferred DB Stored Procedure schema as the source of truth.\n const preparedParameters = new Map<string, PreparedStoredProcedureParameter>();",
"score": 0.8955895304679871
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " if (preparedParameter != null) {\n preparedParameter.value = inputParameters[inputParameterKey];\n }\n // We don't care about provided input parameters that are missing in the Stored Procedure definition.\n }\n return preparedParameters;\n }\n private getMissingRequiredParameters(\n parameters: Map<string, PreparedStoredProcedureParameter>,\n ): PreparedStoredProcedureParameter[] {",
"score": 0.8332355618476868
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " */\n private prepareStoredProcedureResult<T extends Record<string, unknown>>(\n result: IProcedureResult<T>,\n info?: GraphQLResolveInfo,\n ): IResolverProcedureResult<T> {\n const { resultSetFields, outputFields } =\n info !== undefined\n ? {\n resultSetFields: getNodeSelectionSetNames(info, 'resultSets'),\n outputFields: getFieldNamesExcludingNode(info, 'resultSets'),",
"score": 0.8285888433456421
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " private addParametersToRequest(\n parameters: Map<string, PreparedStoredProcedureParameter>,\n request: Request,\n ): Request {\n const preparedRequest = request;\n for (const parameter of parameters.values()) {\n const { name, type, mode, value, defaultValue } = parameter;\n if (defaultValue !== undefined && value === undefined) {\n continue;\n }",
"score": 0.8226631879806519
},
{
"filename": "src/lib/datasource/mssql-datasource.ts",
"retrieved_chunk": " storedProcedureName: string,\n input: InputParameters,\n info?: GraphQLResolveInfo,\n ): Promise<IResolverProcedureResult<T>> {\n const startTime = performance.now();\n const logger = this._mutationLogger;\n logExecutionBegin(logger, `Stored Procedure Mutation ${storedProcedureName}`, input);\n const result = await this._databaseExecutor.executeMutationRequest(\n async (request: Request): Promise<IResolverProcedureResult<T>> =>\n await this._storedProcedureManager.executeStoredProcedure(",
"score": 0.8097193241119385
}
] | typescript | parameterMap.set(item.name, item); |
import sourceToCST, {
ConcreteAttributeNode,
ConcreteElementOpeningTagNode,
ConcreteElementSelfClosingTagNode,
ConcreteLiquidDropNode,
ConcreteNode,
ConcreteNodeTypes,
ConcreteTextNode,
} from '../1-source-to-cst';
import { UnknownConcreteNodeTypeError } from '../errors';
import ASTBuilder from './ast-builder';
/** Common location/source fields shared by every AST node type. */
export type BasicNode<T> = {
  type: T;
  // Start offset of the node within the original source string.
  locStart: number;
  // End offset (exclusive) of the node within the original source string.
  locEnd: number;
  // The original source text the offsets refer to.
  source: string;
};

/** Discriminant values for every AST node produced by this module. */
export enum NodeTypes {
  TextNode = 'TextNode',
  LiquidDropNode = 'LiquidDropNode',
  ElementNode = 'ElementNode',
  AttributeDoubleQuoted = 'AttributeDoubleQuoted',
  AttributeSingleQuoted = 'AttributeSingleQuoted',
  AttributeUnquoted = 'AttributeUnquoted',
  AttributeEmpty = 'AttributeEmpty',
}

/** Plain text content. */
export type TextNode = {
  value: string;
} & BasicNode<NodeTypes.TextNode>;

/** A Liquid drop; `value` holds the drop's inner text. */
export type LiquidDropNode = {
  value: string;
} & BasicNode<NodeTypes.LiquidDropNode>;

/** Union of all AST node kinds this parser stage emits. */
export type LiquidXNode = TextNode | LiquidDropNode | ElementNode | AttributeNode;

/** An element together with its attributes and (possibly empty) children. */
export type ElementNode = {
  name: string;
  source: string;
  attributes: AttributeNode[];
  children: LiquidXNode[];
} & BasicNode<NodeTypes.ElementNode>;

/** Union of the four supported attribute syntaxes. */
export type AttributeNode =
  | AttributeDoubleQuoted
  | AttributeSingleQuoted
  | AttributeUnquoted
  | AttributeEmpty;

/** Shared shape of attributes that carry a value. */
export type AttributeNodeBase<T> = {
  name: TextNode;
  value: TextNode | LiquidDropNode;
} & BasicNode<T>;

export type AttributeDoubleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeDoubleQuoted>;
export type AttributeSingleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeSingleQuoted>;
export type AttributeUnquoted = {} & AttributeNodeBase<NodeTypes.AttributeUnquoted>;
/** An attribute with a name but no value (e.g. `disabled`). */
export type AttributeEmpty = { name: TextNode } & BasicNode<NodeTypes.AttributeEmpty>;
/** Converts a concrete text node into its AST counterpart. */
function toTextNode(node: ConcreteTextNode): TextNode {
  const { locStart, locEnd, source, value } = node;
  return { type: NodeTypes.TextNode, locStart, locEnd, source, value };
}
/** Converts a concrete Liquid drop node into its AST counterpart. */
function toLiquidDropNode(node: ConcreteLiquidDropNode): LiquidDropNode {
  const { locStart, locEnd, source, value } = node;
  return { type: NodeTypes.LiquidDropNode, locStart, locEnd, source, value };
}
/**
 * Converts a concrete opening or self-closing tag into an ElementNode.
 * Children start out empty; the AST builder fills them in later.
 */
function toElementNode(
  node: ConcreteElementOpeningTagNode | ConcreteElementSelfClosingTagNode,
): ElementNode {
  const { locStart, locEnd, name, source } = node;
  return {
    type: NodeTypes.ElementNode,
    locStart,
    locEnd,
    name,
    source,
    attributes: toAttributes(node.attributes),
    children: [],
  };
}
/** Converts a list of concrete attribute nodes into AST attribute nodes. */
function toAttributes(attributes: ConcreteAttributeNode[]) {
  const astNodes = cstToAST(attributes);
  return astNodes as AttributeNode[];
}
/** Converts a single concrete attribute value into its AST counterpart. */
function toAttributeValue(value: ConcreteTextNode | ConcreteLiquidDropNode) {
  const [astValue] = cstToAST([value]);
  return astValue as TextNode | LiquidDropNode;
}
function isAttributeNode(node: any): boolean {
return (
node.type === ConcreteNodeTypes.AttributeDoubleQuoted ||
node.type === ConcreteNodeTypes.AttributeSingleQuoted ||
node.type === ConcreteNodeTypes.AttributeUnquoted ||
node.type === ConcreteNodeTypes.AttributeEmpty
);
}
function cstToAST(cst: ConcreteNode[] | ConcreteAttributeNode[]) {
if (cst.length === 0) return [];
const astBuilder = new ASTBuilder(cst[0].source);
for (let i = 0; i < cst.length; i += 1) {
const node = cst[i];
const prevNode = cst[i - 1];
// Add whitespaces and linebreaks that went missing after parsing. We don't need to do this
// if the node is an attribute since whitespaces between attributes is not important to preserve.
// In fact it would probably break the rendered output due to unexpected text nodes.
// TODO: This should be handled in the grammar/source-to-cst part instead (if possible).
if (prevNode?.source && !isAttributeNode(node)) {
const diff = node.locStart - prevNode.locEnd;
if (diff > 0) {
astBuilder. | push(
toTextNode({ |
type: ConcreteNodeTypes.TextNode,
locStart: prevNode.locEnd,
locEnd: node.locStart,
source: node.source,
value: prevNode.source.slice(prevNode.locEnd, node.locStart),
}),
);
}
}
switch (node.type) {
case ConcreteNodeTypes.TextNode: {
astBuilder.push(toTextNode(node));
break;
}
case ConcreteNodeTypes.LiquidDropNode: {
astBuilder.push(toLiquidDropNode(node));
break;
}
case ConcreteNodeTypes.ElementOpeningTag: {
astBuilder.open(toElementNode(node));
break;
}
case ConcreteNodeTypes.ElementClosingTag: {
astBuilder.close(node, NodeTypes.ElementNode);
break;
}
case ConcreteNodeTypes.ElementSelfClosingTag: {
astBuilder.open(toElementNode(node));
astBuilder.close(node, NodeTypes.ElementNode);
break;
}
case ConcreteNodeTypes.AttributeDoubleQuoted:
case ConcreteNodeTypes.AttributeSingleQuoted:
case ConcreteNodeTypes.AttributeUnquoted: {
const attributeNode: AttributeDoubleQuoted | AttributeSingleQuoted | AttributeUnquoted = {
type: node.type as unknown as
| NodeTypes.AttributeDoubleQuoted
| NodeTypes.AttributeSingleQuoted
| NodeTypes.AttributeUnquoted,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
name: cstToAST([node.name])[0] as TextNode,
value: toAttributeValue(node.value),
};
astBuilder.push(attributeNode);
break;
}
case ConcreteNodeTypes.AttributeEmpty: {
const attributeNode: AttributeEmpty = {
type: NodeTypes.AttributeEmpty,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
name: cstToAST([node.name])[0] as TextNode,
};
astBuilder.push(attributeNode);
break;
}
default: {
throw new UnknownConcreteNodeTypeError(
'',
(node as any)?.source,
(node as any)?.locStart,
(node as any)?.locEnd,
);
}
}
}
return astBuilder.finish();
}
/** Parses LiquidX source text into an AST (source → CST → AST). */
export default function sourceToAST(source: string): LiquidXNode[] {
  return cstToAST(sourceToCST(source));
}
| src/parser/2-cst-to-ast/index.ts | unshopable-liquidx-a101873 | [
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " { withSource = false, isChildOfElementNode = false } = {},\n): string {\n let output = '';\n for (let i = 0; i < ast.length; i += 1) {\n const node = ast[i];\n switch (node.type) {\n case NodeTypes.TextNode: {\n output += renderText(node);\n break;\n }",
"score": 0.8368293046951294
},
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " value: 'children',\n },\n value: {\n type: NodeTypes.LiquidDropNode,\n locStart: 0,\n locEnd: 0,\n source: '',\n value: captureName,\n },\n };",
"score": 0.7939355373382568
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": " return this.sourceString;\n },\n source,\n };\n const mapping: Mapping = {\n Node: 0,\n TextNode: textNode,\n liquidDropNode: {\n type: ConcreteNodeTypes.LiquidDropNode,\n locStart,",
"score": 0.7932573556900024
},
{
"filename": "src/parser/errors.ts",
"retrieved_chunk": " locStart: number;\n locEnd: number;\n};\nclass LoggableError extends Error {\n constructor(info: ErrorResult | ErrorSource) {\n let result = '';\n if (typeof info.result === 'undefined') {\n const { message, source, locStart, locEnd } = info;\n const lc = lineColumn(source);\n const start = lc.fromIndex(locStart);",
"score": 0.7831389307975769
},
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " case NodeTypes.ElementNode: {\n output += renderElement(node, { withSource, isChildOfElementNode });\n break;\n }\n case NodeTypes.AttributeDoubleQuoted:\n case NodeTypes.AttributeSingleQuoted:\n case NodeTypes.AttributeUnquoted: {\n const name = renderText(node.name);\n let value = null;\n if (node.value.type === NodeTypes.TextNode) {",
"score": 0.7767634391784668
}
] | typescript | push(
toTextNode({ |
import { camelCase } from 'lodash';
import { type Request, type IProcedureResult, type IResult, type IRecordSet } from 'mssql';
import { type GraphQLResolveInfo } from 'graphql';
import {
type DriverType,
type PreparedStoredProcedureParameter,
ParameterMode,
type StoredProcedureSchema,
type StoredProcedureParameter,
type ILogger,
type InputParameters,
} from '../types';
import { mapDbTypeToDriverType, replacer } from '../utils';
import { logExecutionBegin, logPerformance, logSafely } from '../logging';
import {
type StoredProcedureCacheManager,
type StoredProcedureMetadataManager,
} from '../stored-procedure';
import { type IResolverProcedureResult } from '../types/i-resolver-procedure-result';
import { getNodeSelectionSetNames, getFieldNamesExcludingNode } from '../utils/graphql-helper';
/**
* StoredProcedureManager provides methods to interact
* with a Microsoft SQL Server database for managing stored procedures.
*/
export class StoredProcedureManager {
/**
* Creates a new instance of StoredProcedureManager.
*/
constructor(
private readonly _storedProcedureCacheManager: StoredProcedureCacheManager,
private readonly _storedProcedureMetadataManager: StoredProcedureMetadataManager,
) {}
/**
* Executes a stored procedure with the provided input parameters, and returns the result.
* @template TVal - The type of records in the result set.
* @template TRet - The type of the result object to be returned.
* @param {string} storedProcedureName - The name of the stored procedure to execute.
* @param {StoredProcedureInput} input - The input parameters for the stored procedure.
* @param {Request} request - The request to execute the stored procedure.
* @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored
* procedure results to the correct schema field names.
* @param {ILogger} logger - The logger to use for logging.
* @returns A Promise that resolves to the result of the stored procedure execution.
*/
public async executeStoredProcedure<T>(
storedProcedureName: string,
input: InputParameters,
request: Request,
logger: ILogger,
info?: GraphQLResolveInfo,
): Promise<IResolverProcedureResult<T>> {
let startTime = performance.now();
let schema = (await this._storedProcedureCacheManager.tryGetFromCache(storedProcedureName)) as
| IResult<StoredProcedureSchema>
| undefined;
if (schema === undefined) {
logSafely(
logger,
'info',
// Yellow
`\x1b[33mCache miss occurred while retrieving the cached schema for ${storedProcedureName}\x1b[0m`,
);
schema = await this._storedProcedureMetadataManager.getStoredProcedureParameterSchema(
storedProcedureName,
logger,
);
await this._storedProcedureCacheManager.addToCache(storedProcedureName, schema);
} else {
logSafely(
logger,
'info',
// Green
`\x1b[32mCache hit occurred while retrieving the cached schema for ${storedProcedureName}\x1b[0m`,
);
}
logPerformance(logger, 'getStoredProcedureParameterSchema', startTime);
startTime = performance.now();
const storedProcedureParameters =
this._storedProcedureMetadataManager.parseStoredProcedureParameters(
storedProcedureName,
schema,
);
logPerformance(logger, 'parseStoredProcedureParameters', startTime);
startTime = performance.now();
const preparedRequest = this.prepareStoredProcedureRequest(
storedProcedureParameters,
input,
request,
);
logPerformance(logger, 'prepareStoredProcedureRequest', startTime);
startTime = performance.now();
logExecutionBegin(
logger,
`Stored Procedure ${storedProcedureName} with parameters`,
preparedRequest.parameters,
// Green
'32m',
);
const result = await preparedRequest.execute(storedProcedureName);
startTime = performance.now();
const preparedResult = this.prepareStoredProcedureResult(result, info);
logPerformance(logger, 'prepareStoredProcedureResult', startTime);
return preparedResult;
}
private prepareParameters(
storedProcedureParameters: IterableIterator<StoredProcedureParameter>,
input: InputParameters,
): Map<string, PreparedStoredProcedureParameter> {
// We want to use the inferred DB Stored Procedure schema as the source of truth.
const preparedParameters = new Map<string, PreparedStoredProcedureParameter>();
for (const spParameter of storedProcedureParameters) {
const { name, type, length, precision, scale, ...rest } = spParameter;
const parameterName = name.slice(1);
// Let's use the parameter name in lowercase as the lookup key.
preparedParameters.set(parameterName.toLowerCase(), {
name: parameterName,
type: mapDbTypeToDriverType({
type,
length,
precision,
scale,
}) as DriverType,
value: undefined,
...rest,
});
}
// Populate our input values into the request parameters.
const inputParameters = input as Record<string, unknown>;
for (const inputParameterKey in inputParameters) {
const preparedParameter = preparedParameters.get(inputParameterKey.toLowerCase());
if (preparedParameter != null) {
preparedParameter.value = inputParameters[inputParameterKey];
}
// We don't care about provided input parameters that are missing in the Stored Procedure definition.
}
return preparedParameters;
}
private getMissingRequiredParameters(
parameters: Map<string, PreparedStoredProcedureParameter>,
): PreparedStoredProcedureParameter[] {
// Check what required parameters are missing.
const missingRequiredParameters = [];
for (const parameter of parameters.values()) {
// If they have a default value they can be ommitted from the request.
if (parameter.defaultValue === undefined && parameter.value === undefined) {
missingRequiredParameters.push(parameter);
}
}
return missingRequiredParameters;
}
private addParametersToRequest(
parameters: Map<string, PreparedStoredProcedureParameter>,
request: Request,
): Request {
const preparedRequest = request;
for (const parameter of parameters.values()) {
const { name, type, mode, value, defaultValue } = parameter;
if (defaultValue !== undefined && value === undefined) {
continue;
}
const modeEnum = mode;
if (modeEnum === ParameterMode.IN) {
preparedRequest.input(name, type, value);
} else if (modeEnum === ParameterMode.INOUT) {
preparedRequest.output(name, type, value);
} else {
throw new Error(`Unknown parameter mode: ${mode}`);
}
}
return preparedRequest;
}
/**
* Prepares the stored procedure request.
* @param {IterableIterator<StoredProcedureParameter>} storedProcedureParameters - The stored procedure parameters.
* @param {StoredProcedureInput} input - The input object.
* @param {Request} request - The request object.
* @returns A prepared request object.
*/
private prepareStoredProcedureRequest(
storedProcedureParameters: IterableIterator<StoredProcedureParameter>,
input: InputParameters,
request: Request,
): Request {
const parameters = this.prepareParameters(storedProcedureParameters, input);
const missingRequiredParameters = this.getMissingRequiredParameters(parameters);
const missingLength = missingRequiredParameters.length;
if (missingLength > 0) {
throw new Error(
`Missing ${missingLength} required parameters: ${missingRequiredParameters
.map((param) => JSON.stringify(param, replacer, 0))
.join(', ')}.`,
);
}
const preparedRequest = this.addParametersToRequest(parameters, request);
return preparedRequest;
}
/**
* Maps the keys of an object based on the provided mapping.
* @template T - The type of the original object.
* @param {T} obj - The object whose keys need to be mapped.
* @param {Record<string, string>} mapping - A dictionary containing the mapping of the original keys to the new keys.
* @returns {T} A new object with the keys mapped according to the provided mapping.
*/
private mapKeysWithMapping<T extends Record<string, unknown>>(
obj: T,
mapping: Record<string, string>,
): T {
const result: Record<string, unknown> = {};
for (const key in obj) {
const mappedKey = mapping[key.toLowerCase()] ?? camelCase(key);
result[mappedKey] = obj[key];
}
return result as T;
}
/**
* Prepares the stored procedure result into a GraphQL result object.
* @param {IProcedureResult} result - The stored procedure result.
* @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored
* procedure results to the correct schema field names.
* @returns {IResolverProcedureResult} A prepared GraphQL result object.
*/
private prepareStoredProcedureResult<T extends Record<string, unknown>>(
result: IProcedureResult<T>,
info?: GraphQLResolveInfo,
): IResolverProcedureResult<T> {
const { resultSetFields, outputFields } =
info !== undefined
? {
resultSetFields: | getNodeSelectionSetNames(info, 'resultSets'),
outputFields: getFieldNamesExcludingNode(info, 'resultSets'),
} |
: { resultSetFields: {}, outputFields: {} };
const resultSets = result.recordsets.map((recordset: IRecordSet<Record<string, unknown>>) => {
return recordset.map((record: Record<string, unknown>) =>
this.mapKeysWithMapping(record, resultSetFields),
);
});
const output = this.mapKeysWithMapping(result.output, outputFields);
const preparedResult = {
returnValue: result.returnValue,
resultSets: resultSets as T[][],
rowsAffected: result.rowsAffected,
...output,
};
return preparedResult;
}
}
| src/lib/stored-procedure/stored-procedure-manager.ts | Falven-mssql-data-source-bca6621 | [
{
"filename": "src/lib/utils/graphql-helper.ts",
"retrieved_chunk": " info: GraphQLResolveInfo,\n nodeName: string,\n): Record<string, string> {\n const siblingFields: Record<string, string> = {};\n info.fieldNodes.forEach((fieldNode) => {\n visit(fieldNode, {\n Field(node) {\n const isTargetNode = node.name.value.toLowerCase() === nodeName.toLowerCase();\n if (isTargetNode) {\n return false;",
"score": 0.8348248600959778
},
{
"filename": "src/lib/utils/graphql-helper.ts",
"retrieved_chunk": " info: GraphQLResolveInfo,\n nodeName: string,\n): Record<string, string> {\n const targetNode = findNodeByName(info, nodeName);\n // If the target node is not found, return an empty dictionary\n if (targetNode === undefined) {\n return {};\n }\n // If the target node is found, return its subfield names\n return getSelectionSetNames(targetNode);",
"score": 0.8188772201538086
},
{
"filename": "src/lib/utils/graphql-helper.ts",
"retrieved_chunk": "import { type GraphQLResolveInfo, type FieldNode, visit } from 'graphql';\n/**\n * Find the first node in the GraphQLResolveInfo with the given case insensitive name.\n * @param {GraphQLResolveInfo} info - The GraphQL resolve information object.\n * @param {string} nodeName - The case insensitive name of the node to find.\n * @returns {FieldNode | undefined} - The found node, or undefined if not found.\n */\nexport function findNodeByName(info: GraphQLResolveInfo, nodeName: string): FieldNode | undefined {\n let targetNode: FieldNode | undefined;\n // Iterate through the fieldNodes, stopping when the target node is found",
"score": 0.8045140504837036
},
{
"filename": "src/lib/utils/graphql-helper.ts",
"retrieved_chunk": " const subfieldNames: Record<string, string> = {};\n // If the target node has a selection set, visit its subfields and collect their names\n if (targetNode.selectionSet !== undefined) {\n visit(targetNode.selectionSet, {\n Field(node) {\n subfieldNames[node.name.value.toLowerCase()] = node.name.value;\n },\n });\n }\n return subfieldNames;",
"score": 0.7885913848876953
},
{
"filename": "src/lib/executor/database-executor.ts",
"retrieved_chunk": " requestFn,\n logger,\n );\n }\n /**\n * @inheritdoc\n */\n public async executeMutationRequest<T>(\n requestFn: (request: Request) => Promise<T>,\n logger: ILogger,",
"score": 0.7828041315078735
}
] | typescript | getNodeSelectionSetNames(info, 'resultSets'),
outputFields: getFieldNamesExcludingNode(info, 'resultSets'),
} |
/* eslint-disable no-await-in-loop */
import dotenv from 'dotenv';
import { OpenAIChat } from 'langchain/llms/openai';
// eslint-disable-next-line import/no-unresolved
import * as readline from 'node:readline/promises';
import path from 'path';
import fs from 'fs';
/* This line of code is importing the `stdin` and `stdout` streams from the `process` module in
Node.js. These streams are used for reading input from the user and writing output to the console,
respectively. */
import { stdin as input, stdout as output } from 'node:process';
import { CallbackManager } from 'langchain/callbacks';
import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts';
import { LLMChain } from 'langchain/chains';
import { oneLine } from 'common-tags';
import chalk from 'chalk';
import logChat from './chatLogger.js';
import createCommandHandler from './commands.js';
import { getMemoryVectorStore, addDocumentsToMemoryVectorStore, getBufferWindowMemory } from './lib/memoryManager.js';
import { getContextVectorStore } from './lib/contextManager.js';
import { getRelevantContext } from './lib/vectorStoreUtils.js';
import sanitizeInput from './utils/sanitizeInput.js';
import { getConfig, getProjectRoot } from './config/index.js';
const projectRootDir = getProjectRoot();
dotenv.config();
// Set up the chat log directory
const chatLogDirectory = path.join(projectRootDir, 'chat_logs');
// Get the prompt template
const systemPromptTemplate = fs.readFileSync(path.join(projectRootDir, 'src/prompt.txt'), 'utf8');
// Set up the readline interface to read input from the user and write output to the console
const rl = readline.createInterface({ input, output });
// Set up CLI commands
const commandHandler: CommandHandler = createCommandHandler();
const callbackManager = CallbackManager.fromHandlers({
// This function is called when the LLM generates a new token (i.e., a prediction for the next word)
async handleLLMNewToken(token: string) {
// Write the token to the output stream (i.e., the console)
output.write(token);
},
});
const llm = new OpenAIChat({
streaming: true,
callbackManager,
modelName: process.env.MODEL || 'gpt-3.5-turbo',
});
const systemPrompt = SystemMessagePromptTemplate.fromTemplate(oneLine`
${systemPromptTemplate}
`);
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
systemPrompt,
HumanMessagePromptTemplate.fromTemplate('QUESTION: """{input}"""'),
]);
const windowMemory = getBufferWindowMemory();
const chain = new LLMChain({
prompt: chatPrompt,
memory: windowMemory,
llm,
});
// eslint-disable-next-line no-constant-condition
while (true) {
output.write(chalk.green('\nStart chatting or type /help for a list of commands\n'));
const userInput = await rl.question('> ');
let response;
if (userInput.startsWith('/')) {
const [command, ...args] = userInput.slice(1).split(' ');
await commandHandler.execute(command, args, output);
} else {
| const memoryVectorStore = await getMemoryVectorStore(); |
const contextVectorStore = await getContextVectorStore();
const question = sanitizeInput(userInput);
const config = getConfig();
const context = await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve);
const history = await getRelevantContext(memoryVectorStore, question, config.numMemoryDocumentsToRetrieve);
try {
response = await chain.call({
input: question,
context,
history,
immediate_history: config.useWindowMemory ? windowMemory : '',
});
if (response) {
await addDocumentsToMemoryVectorStore([
{ content: question, metadataType: 'question' },
{ content: response.text, metadataType: 'answer' },
]);
await logChat(chatLogDirectory, question, response.response);
}
} catch (error) {
if (error instanceof Error && error.message.includes('Cancel:')) {
// TODO: Handle cancel
} else if (error instanceof Error) {
output.write(chalk.red(error.message));
} else {
output.write(chalk.red(error));
}
}
}
output.write('\n');
}
| src/index.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/commands.ts",
"retrieved_chunk": " getCommands,\n async execute(commandName: string, args: string[], output: NodeJS.WriteStream) {\n const command = commands.find((cmd) => cmd.name === commandName || cmd.aliases.includes(commandName));\n if (command) {\n await command.execute(args, output, commandHandler);\n } else {\n output.write(chalk.red('Unknown command. Type /help to see the list of available commands.\\n'));\n }\n },\n };",
"score": 0.8357644081115723
},
{
"filename": "src/commands/helpCommand.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport createCommand from './command.js';\nconst helpCommand = createCommand(\n 'help',\n ['h', '?'],\n 'Show the list of available commands',\n (_args, output, commandHandler) =>\n new Promise<void>((resolve) => {\n output.write(chalk.blue('Usage:\\n'));\n output.write('Ask memorybot to write some marketing materials and press enter.\\n');",
"score": 0.8295903205871582
},
{
"filename": "src/commands/resetChatCommand.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport createCommand from './command.js';\nimport { resetBufferWindowMemory, resetMemoryVectorStore, setMemoryVectorStore } from '../lib/memoryManager.js';\nconst resetChatCommand = createCommand(\n 'reset',\n [],\n 'Resets the chat and starts a new conversation - This clears the memory vector store and the buffer window memory.',\n async (_args, output) => {\n output.write(chalk.yellow('\\nResetting the chat!\\n'));\n await resetMemoryVectorStore((newMemoryVectorStore) => {",
"score": 0.8036856651306152
},
{
"filename": "src/commands/switchContextStoreCommand.ts",
"retrieved_chunk": " if (!args || args.length !== 1) {\n output.write(chalk.red('Invalid number of arguments. Usage: /change-context-store `subdirectory`\\n'));\n return;\n }\n const subDirectory = args[0];\n await loadOrCreateEmptyVectorStore(subDirectory);\n }\n);\nexport default changeContextStoreCommand;",
"score": 0.7936467528343201
},
{
"filename": "src/commands/listContextStoresCommand.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport createCommand from './command.js';\nimport { listContextStores } from '../lib/contextManager.js';\nconst listContextStoresCommand = createCommand(\n 'list-context-stores',\n ['lcs'],\n `Lists all available context vector stores and their details.\\n`,\n async (args, output) => {\n if (!args || args.length > 0) {\n output.write(chalk.red('Invalid number of arguments. Usage: /list-context-stores\\n'));",
"score": 0.7927542328834534
}
] | typescript | const memoryVectorStore = await getMemoryVectorStore(); |
/* eslint-disable no-await-in-loop */
import dotenv from 'dotenv';
import { OpenAIChat } from 'langchain/llms/openai';
// eslint-disable-next-line import/no-unresolved
import * as readline from 'node:readline/promises';
import path from 'path';
import fs from 'fs';
/* This line of code is importing the `stdin` and `stdout` streams from the `process` module in
Node.js. These streams are used for reading input from the user and writing output to the console,
respectively. */
import { stdin as input, stdout as output } from 'node:process';
import { CallbackManager } from 'langchain/callbacks';
import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts';
import { LLMChain } from 'langchain/chains';
import { oneLine } from 'common-tags';
import chalk from 'chalk';
import logChat from './chatLogger.js';
import createCommandHandler from './commands.js';
import { getMemoryVectorStore, addDocumentsToMemoryVectorStore, getBufferWindowMemory } from './lib/memoryManager.js';
import { getContextVectorStore } from './lib/contextManager.js';
import { getRelevantContext } from './lib/vectorStoreUtils.js';
import sanitizeInput from './utils/sanitizeInput.js';
import { getConfig, getProjectRoot } from './config/index.js';
const projectRootDir = getProjectRoot();
dotenv.config();
// Set up the chat log directory
const chatLogDirectory = path.join(projectRootDir, 'chat_logs');
// Get the prompt template
const systemPromptTemplate = fs.readFileSync(path.join(projectRootDir, 'src/prompt.txt'), 'utf8');
// Set up the readline interface to read input from the user and write output to the console
const rl = readline.createInterface({ input, output });
// Set up CLI commands
const commandHandler: CommandHandler = createCommandHandler();
const callbackManager = CallbackManager.fromHandlers({
// This function is called when the LLM generates a new token (i.e., a prediction for the next word)
async handleLLMNewToken(token: string) {
// Write the token to the output stream (i.e., the console)
output.write(token);
},
});
const llm = new OpenAIChat({
streaming: true,
callbackManager,
modelName: process.env.MODEL || 'gpt-3.5-turbo',
});
const systemPrompt = SystemMessagePromptTemplate.fromTemplate(oneLine`
${systemPromptTemplate}
`);
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
systemPrompt,
HumanMessagePromptTemplate.fromTemplate('QUESTION: """{input}"""'),
]);
const windowMemory = getBufferWindowMemory();
const chain = new LLMChain({
prompt: chatPrompt,
memory: windowMemory,
llm,
});
// eslint-disable-next-line no-constant-condition
while (true) {
output.write(chalk.green('\nStart chatting or type /help for a list of commands\n'));
const userInput = await rl.question('> ');
let response;
if (userInput.startsWith('/')) {
const [command, ...args] = userInput.slice(1).split(' ');
await commandHandler.execute(command, args, output);
} else {
const memoryVectorStore = await getMemoryVectorStore();
const contextVectorStore = await getContextVectorStore();
const question = sanitizeInput(userInput);
const config = getConfig();
const context = await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve);
const history = await getRelevantContext(memoryVectorStore, question, config.numMemoryDocumentsToRetrieve);
try {
response = await chain.call({
input: question,
context,
history,
immediate_history: config.useWindowMemory ? windowMemory : '',
});
if (response) {
await addDocumentsToMemoryVectorStore([
{ content: question, metadataType: 'question' },
{ content: response.text, metadataType: 'answer' },
]);
await | logChat(chatLogDirectory, question, response.response); |
}
} catch (error) {
if (error instanceof Error && error.message.includes('Cancel:')) {
// TODO: Handle cancel
} else if (error instanceof Error) {
output.write(chalk.red(error.message));
} else {
output.write(chalk.red(error));
}
}
}
output.write('\n');
}
| src/index.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": " documents: Array<{ content: string; metadataType: string }>\n): Promise<void> {\n const formattedDocuments = documents.map(\n (doc) => new Document({ pageContent: doc.content, metadata: { type: doc.metadataType } })\n );\n await memoryWrapper.vectorStoreInstance.addDocuments(formattedDocuments);\n await saveMemoryVectorStore();\n}\nfunction resetBufferWindowMemory() {\n bufferWindowMemory.clear();",
"score": 0.8016698360443115
},
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": " pageContent: text,\n }),\n ]);\n const vectorStore = await getContextVectorStore();\n await vectorStore.addDocuments(videoDocs);\n await vectorStore.save(dbDirectory);\n spinner.succeed();\n return;\n } catch (error) {\n if (spinner) {",
"score": 0.7604093551635742
},
{
"filename": "src/commands/resetChatCommand.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport createCommand from './command.js';\nimport { resetBufferWindowMemory, resetMemoryVectorStore, setMemoryVectorStore } from '../lib/memoryManager.js';\nconst resetChatCommand = createCommand(\n 'reset',\n [],\n 'Resets the chat and starts a new conversation - This clears the memory vector store and the buffer window memory.',\n async (_args, output) => {\n output.write(chalk.yellow('\\nResetting the chat!\\n'));\n await resetMemoryVectorStore((newMemoryVectorStore) => {",
"score": 0.7450785636901855
},
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": " const pages = (await crawler.start()) as Page[];\n documents = await Promise.all(\n pages.map((row) => {\n const splitter = new RecursiveCharacterTextSplitter();\n const webDocs = splitter.splitDocuments([\n new Document({\n pageContent: row.text,\n }),\n ]);\n return webDocs;",
"score": 0.7296069264411926
},
{
"filename": "src/global.d.ts",
"retrieved_chunk": "type Page = {\n url: string;\n text: string;\n title: string;\n};\ninterface Config {\n currentVectorStoreDatabasePath: string;\n numContextDocumentsToRetrieve: number;\n numMemoryDocumentsToRetrieve: number;\n useWindowMemory: boolean;",
"score": 0.7267088294029236
}
] | typescript | logChat(chatLogDirectory, question, response.response); |
/* eslint-disable no-await-in-loop */
import dotenv from 'dotenv';
import { OpenAIChat } from 'langchain/llms/openai';
// eslint-disable-next-line import/no-unresolved
import * as readline from 'node:readline/promises';
import path from 'path';
import fs from 'fs';
/* This line of code is importing the `stdin` and `stdout` streams from the `process` module in
Node.js. These streams are used for reading input from the user and writing output to the console,
respectively. */
import { stdin as input, stdout as output } from 'node:process';
import { CallbackManager } from 'langchain/callbacks';
import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts';
import { LLMChain } from 'langchain/chains';
import { oneLine } from 'common-tags';
import chalk from 'chalk';
import logChat from './chatLogger.js';
import createCommandHandler from './commands.js';
import { getMemoryVectorStore, addDocumentsToMemoryVectorStore, getBufferWindowMemory } from './lib/memoryManager.js';
import { getContextVectorStore } from './lib/contextManager.js';
import { getRelevantContext } from './lib/vectorStoreUtils.js';
import sanitizeInput from './utils/sanitizeInput.js';
import { getConfig, getProjectRoot } from './config/index.js';
const projectRootDir = getProjectRoot();
dotenv.config();
// Set up the chat log directory
const chatLogDirectory = path.join(projectRootDir, 'chat_logs');
// Get the prompt template
const systemPromptTemplate = fs.readFileSync(path.join(projectRootDir, 'src/prompt.txt'), 'utf8');
// Set up the readline interface to read input from the user and write output to the console
const rl = readline.createInterface({ input, output });
// Set up CLI commands
const commandHandler: CommandHandler = createCommandHandler();
const callbackManager = CallbackManager.fromHandlers({
// This function is called when the LLM generates a new token (i.e., a prediction for the next word)
async handleLLMNewToken(token: string) {
// Write the token to the output stream (i.e., the console)
output.write(token);
},
});
const llm = new OpenAIChat({
streaming: true,
callbackManager,
modelName: process.env.MODEL || 'gpt-3.5-turbo',
});
const systemPrompt = SystemMessagePromptTemplate.fromTemplate(oneLine`
${systemPromptTemplate}
`);
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
systemPrompt,
HumanMessagePromptTemplate.fromTemplate('QUESTION: """{input}"""'),
]);
const windowMemory = getBufferWindowMemory();
const chain = new LLMChain({
prompt: chatPrompt,
memory: windowMemory,
llm,
});
// eslint-disable-next-line no-constant-condition
while (true) {
output.write(chalk.green('\nStart chatting or type /help for a list of commands\n'));
const userInput = await rl.question('> ');
let response;
if (userInput.startsWith('/')) {
const [command, ...args] = userInput.slice(1).split(' ');
await commandHandler.execute(command, args, output);
} else {
const memoryVectorStore = await getMemoryVectorStore();
const contextVectorStore = await getContextVectorStore();
const question = sanitizeInput(userInput);
const config = getConfig();
const context = | await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve); |
const history = await getRelevantContext(memoryVectorStore, question, config.numMemoryDocumentsToRetrieve);
try {
response = await chain.call({
input: question,
context,
history,
immediate_history: config.useWindowMemory ? windowMemory : '',
});
if (response) {
await addDocumentsToMemoryVectorStore([
{ content: question, metadataType: 'question' },
{ content: response.text, metadataType: 'answer' },
]);
await logChat(chatLogDirectory, question, response.response);
}
} catch (error) {
if (error instanceof Error && error.message.includes('Cancel:')) {
// TODO: Handle cancel
} else if (error instanceof Error) {
output.write(chalk.red(error.message));
} else {
output.write(chalk.red(error));
}
}
}
output.write('\n');
}
| src/index.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": " try {\n spinner = ora({ ...defaultOraOptions, text: `Adding files to the Context Vector Store` }).start();\n const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');\n const documents = await Promise.all(\n filePaths.map((filePath) => loadAndSplitFile(path.join(docsDirectory, filePath)))\n );\n const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);\n const vectorStore = await getContextVectorStore();\n await vectorStore.addDocuments(flattenedDocuments);\n await vectorStore.save(dbDirectory);",
"score": 0.8159359693527222
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": " documents: Array<{ content: string; metadataType: string }>\n): Promise<void> {\n const formattedDocuments = documents.map(\n (doc) => new Document({ pageContent: doc.content, metadata: { type: doc.metadataType } })\n );\n await memoryWrapper.vectorStoreInstance.addDocuments(formattedDocuments);\n await saveMemoryVectorStore();\n}\nfunction resetBufferWindowMemory() {\n bufferWindowMemory.clear();",
"score": 0.8057152628898621
},
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": " pageContent: text,\n }),\n ]);\n const vectorStore = await getContextVectorStore();\n await vectorStore.addDocuments(videoDocs);\n await vectorStore.save(dbDirectory);\n spinner.succeed();\n return;\n } catch (error) {\n if (spinner) {",
"score": 0.7906865477561951
},
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": " const dbDirectory = getConfig().currentVectorStoreDatabasePath;\n try {\n vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));\n } catch {\n spinner = ora({\n ...defaultOraOptions,\n text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),\n }).start();\n const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');\n const filesToAdd = await getDirectoryFiles(docsDirectory);",
"score": 0.7906357049942017
},
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": " const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath)));\n const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);\n vectorStore = await HNSWLib.fromDocuments(flattenedDocuments, new OpenAIEmbeddings({ maxConcurrency: 5 }));\n await vectorStore.save(dbDirectory);\n spinner.succeed();\n }\n return vectorStore;\n}\nconst contextVectorStore = await loadOrCreateVectorStore();\nconst contextWrapper = {",
"score": 0.7848201990127563
}
] | typescript | await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve); |
import { type IResult, type Request } from 'mssql';
import type { StoredProcedureParameter, StoredProcedureSchema, ILogger } from '../types';
import { type DatabaseExecutor } from '../executor';
import { convertSqlValueToJsValue } from '../utils';
/**
* A manager for stored procedure metadata.
* Handles the retrieval and caching of stored procedure metadata.
*/
export class StoredProcedureMetadataManager {
/**
* Regular expression to extract MSSQL stored procedure names.
* See https://regex101.com/r/cMsTyT/1 for this regex.
*/
private static readonly storedProcedureNameRegex =
/((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))/i;
/**
* Matches any comments from the Stored Procedure definition.
* See https://regex101.com/r/dxA7n0/1 for this regex.
*/
private static readonly commentRegex = /(?:\s*-{2}.+\s*$)|(?:\/\*([\s\S]*?)\*\/)/gm;
/**
* Matches the parameters from the Stored Procedure definition.
* See https://regex101.com/r/4TaTky/1 for this regex.
*/
private static readonly parameterSectionRegex =
/(?<=(?:CREATE|ALTER)\s+PROCEDURE)\s+((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))(.*?)(?=(?:AS|FOR\s+REPLICATION)[^\w])/is;
/**
* See https://regex101.com/r/iMEaLb/1 for this regex.
* Match the individual parameters in the Parameter Definition.
*/
private static readonly parameterDefinitionRegex = /(@[\w]+)\s+([^\s]+)\s*=\s*([^, ]*),?/gi;
constructor(private readonly _databaseExecutor: DatabaseExecutor) {}
/**
* Parses the stored procedure parameter schema into a StoredProcedureParameter array.
* @param {string} storedProcedureName - The name of the stored procedure to retrieve the parameter schema for.
* @returns A Promise that resolves to the result of the stored procedure execution.
*/
public async getStoredProcedureParameterSchema(
storedProcedureName: string,
logger: ILogger,
): Promise<IResult<StoredProcedureSchema>> {
return await this._databaseExecutor.executeQueryRequest(async (request: Request) => {
// Remove square bracket notation if any, and split into schema and name.
const schemaAndName = storedProcedureName.replace(/\[|\]/g, '').split('.');
const result = await request.query<StoredProcedureSchema>(
'SELECT ' +
'PARAMETER_NAME as name, ' +
'DATA_TYPE as type, ' +
'PARAMETER_MODE as mode, ' +
'CHARACTER_MAXIMUM_LENGTH length, ' +
'NUMERIC_PRECISION as precision, ' +
'NUMERIC_SCALE as scale ' +
'FROM INFORMATION_SCHEMA.PARAMETERS ' +
`WHERE SPECIFIC_SCHEMA = '${schemaAndName[0]}' AND SPECIFIC_NAME = '${schemaAndName[1]}';
SELECT OBJECT_DEFINITION(OBJECT_ID('${storedProcedureName}')) AS storedProcedureDefinition;`,
);
const recordSetLength = result.recordsets.length as number;
if (recordSetLength < 1 || recordSetLength > 2) {
throw new Error(
`Could not retrieve stored procedure parameter schema from Database for stored procedure ${storedProcedureName}.`,
);
}
if (recordSetLength !== 2 || result.recordsets[1].length !== 1) {
throw new Error(
`Could not retrieve stored procedure definition from Database for stored procedure ${storedProcedureName}.`,
);
}
return result;
}, logger);
}
/**
* Parses the stored procedure parameter schema into a StoredProcedureParameter array.
* @param {string} storedProcedureName - The name of the stored procedure to parse the parameter schema for.
* @param {IResult<StoredProcedureSchema>} schemaResult - The result of the stored procedure parameter schema query.
* @returns A StoredProcedureParameter array.
*/
public parseStoredProcedureParameters(
storedProcedureName: string,
schemaResult: IResult<StoredProcedureSchema>,
): IterableIterator<StoredProcedureParameter> {
const parameterSchemaMap: Map<string, StoredProcedureParameter> =
schemaResult.recordsets[0].reduce(
(parameterMap: Map<string, StoredProcedureParameter>, item: StoredProcedureParameter) => {
parameterMap.set(item.name, item);
return parameterMap;
},
new Map<string, StoredProcedureParameter>(),
);
const storedProcedureDefinition = schemaResult.recordsets[1][0].storedProcedureDefinition;
if (storedProcedureDefinition == null) {
throw new Error(
`Could not parse stored procedure definition for stored procedure ${storedProcedureName}.`,
);
}
const commentStrippedStoredProcedureDefinition = storedProcedureDefinition.replace(
StoredProcedureMetadataManager.commentRegex,
'',
);
if (commentStrippedStoredProcedureDefinition === '') {
throw new Error(
`Could not parse stored procedure comments from definition for stored procedure ${storedProcedureName}.`,
);
}
const parameterSection = commentStrippedStoredProcedureDefinition.match(
StoredProcedureMetadataManager.parameterSectionRegex,
);
if (parameterSection === null || parameterSection.length !== 9) {
throw new Error(
`Could not parse stored procedure parameters from definition for stored procedure ${storedProcedureName}.`,
);
}
const parameterDefinition = parameterSection[8];
let parameterDefinitionMatch;
while (
(parameterDefinitionMatch =
StoredProcedureMetadataManager.parameterDefinitionRegex.exec(parameterDefinition)) !== null
) {
const name = parameterDefinitionMatch[1];
const type = parameterDefinitionMatch[2];
const defaultValue = parameterDefinitionMatch[3];
const parameter = parameterSchemaMap.get(name);
if (parameter !== undefined) {
parameter | .defaultValue = convertSqlValueToJsValue(defaultValue, type); |
}
}
return parameterSchemaMap.values();
}
}
| src/lib/stored-procedure/stored-procedure-metadata-manager.ts | Falven-mssql-data-source-bca6621 | [
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " for (const spParameter of storedProcedureParameters) {\n const { name, type, length, precision, scale, ...rest } = spParameter;\n const parameterName = name.slice(1);\n // Let's use the parameter name in lowercase as the lookup key.\n preparedParameters.set(parameterName.toLowerCase(), {\n name: parameterName,\n type: mapDbTypeToDriverType({\n type,\n length,\n precision,",
"score": 0.824323296546936
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " if (preparedParameter != null) {\n preparedParameter.value = inputParameters[inputParameterKey];\n }\n // We don't care about provided input parameters that are missing in the Stored Procedure definition.\n }\n return preparedParameters;\n }\n private getMissingRequiredParameters(\n parameters: Map<string, PreparedStoredProcedureParameter>,\n ): PreparedStoredProcedureParameter[] {",
"score": 0.822559118270874
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " private addParametersToRequest(\n parameters: Map<string, PreparedStoredProcedureParameter>,\n request: Request,\n ): Request {\n const preparedRequest = request;\n for (const parameter of parameters.values()) {\n const { name, type, mode, value, defaultValue } = parameter;\n if (defaultValue !== undefined && value === undefined) {\n continue;\n }",
"score": 0.8189826607704163
},
{
"filename": "src/lib/utils/type-map.ts",
"retrieved_chunk": "}: Pick<StoredProcedureParameter, 'type' | 'length' | 'precision' | 'scale'>): ISqlTypeFactory => {\n const types: IndexableTypes = TYPES;\n const property = findPropertyCaseInsensitive(types, type);\n if (property !== null) {\n const typeFactory = types[property as TypesKey];\n if (isSqlTypeFactoryWithNoParams(typeFactory)) {\n return typeFactory();\n } else if (isSqlTypeFactoryWithLength(typeFactory)) {\n return (typeFactory as ISqlTypeFactoryWithLength)(length === -1 ? MAX : length);\n } else if (isSqlTypeFactoryWithScale(typeFactory)) {",
"score": 0.8173438310623169
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " // Check what required parameters are missing.\n const missingRequiredParameters = [];\n for (const parameter of parameters.values()) {\n // If they have a default value they can be ommitted from the request.\n if (parameter.defaultValue === undefined && parameter.value === undefined) {\n missingRequiredParameters.push(parameter);\n }\n }\n return missingRequiredParameters;\n }",
"score": 0.8126237392425537
}
] | typescript | .defaultValue = convertSqlValueToJsValue(defaultValue, type); |
/* eslint-disable no-await-in-loop */
import dotenv from 'dotenv';
import { OpenAIChat } from 'langchain/llms/openai';
// eslint-disable-next-line import/no-unresolved
import * as readline from 'node:readline/promises';
import path from 'path';
import fs from 'fs';
/* This line of code is importing the `stdin` and `stdout` streams from the `process` module in
Node.js. These streams are used for reading input from the user and writing output to the console,
respectively. */
import { stdin as input, stdout as output } from 'node:process';
import { CallbackManager } from 'langchain/callbacks';
import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts';
import { LLMChain } from 'langchain/chains';
import { oneLine } from 'common-tags';
import chalk from 'chalk';
import logChat from './chatLogger.js';
import createCommandHandler from './commands.js';
import { getMemoryVectorStore, addDocumentsToMemoryVectorStore, getBufferWindowMemory } from './lib/memoryManager.js';
import { getContextVectorStore } from './lib/contextManager.js';
import { getRelevantContext } from './lib/vectorStoreUtils.js';
import sanitizeInput from './utils/sanitizeInput.js';
import { getConfig, getProjectRoot } from './config/index.js';
// Resolve the project root once; all file paths below are relative to it.
const projectRootDir = getProjectRoot();

// Load environment variables (e.g. MODEL) from .env.
dotenv.config();

// Set up the chat log directory
const chatLogDirectory = path.join(projectRootDir, 'chat_logs');

// Get the prompt template (read synchronously once at startup)
const systemPromptTemplate = fs.readFileSync(path.join(projectRootDir, 'src/prompt.txt'), 'utf8');

// Set up the readline interface to read input from the user and write output to the console
const rl = readline.createInterface({ input, output });

// Set up CLI commands ("/" prefixed input is dispatched here instead of the LLM)
const commandHandler: CommandHandler = createCommandHandler();

const callbackManager = CallbackManager.fromHandlers({
  // This function is called when the LLM generates a new token (i.e., a prediction for the next word)
  async handleLLMNewToken(token: string) {
    // Write the token to the output stream (i.e., the console)
    output.write(token);
  },
});

// Streaming chat model; generated tokens are echoed to the console by the callback above.
// Model name is configurable via the MODEL environment variable.
const llm = new OpenAIChat({
  streaming: true,
  callbackManager,
  modelName: process.env.MODEL || 'gpt-3.5-turbo',
});

// System prompt read from src/prompt.txt; oneLine collapses it to a single line.
const systemPrompt = SystemMessagePromptTemplate.fromTemplate(oneLine`
  ${systemPromptTemplate}
`);

// Full chat prompt: system instructions followed by the user's quoted question.
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
  systemPrompt,
  HumanMessagePromptTemplate.fromTemplate('QUESTION: """{input}"""'),
]);

// Sliding-window conversation memory; attached to the chain so recent turns are replayed.
const windowMemory = getBufferWindowMemory();

const chain = new LLMChain({
  prompt: chatPrompt,
  memory: windowMemory,
  llm,
});
// eslint-disable-next-line no-constant-condition
while (true) {
output.write(chalk.green('\nStart chatting or type /help for a list of commands\n'));
const userInput = await rl.question('> ');
let response;
if (userInput.startsWith('/')) {
const [command, ...args] = userInput.slice(1).split(' ');
await commandHandler.execute(command, args, output);
} else {
const memoryVectorStore = await getMemoryVectorStore();
const contextVectorStore = await getContextVectorStore();
| const question = sanitizeInput(userInput); |
const config = getConfig();
const context = await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve);
const history = await getRelevantContext(memoryVectorStore, question, config.numMemoryDocumentsToRetrieve);
try {
response = await chain.call({
input: question,
context,
history,
immediate_history: config.useWindowMemory ? windowMemory : '',
});
if (response) {
await addDocumentsToMemoryVectorStore([
{ content: question, metadataType: 'question' },
{ content: response.text, metadataType: 'answer' },
]);
await logChat(chatLogDirectory, question, response.response);
}
} catch (error) {
if (error instanceof Error && error.message.includes('Cancel:')) {
// TODO: Handle cancel
} else if (error instanceof Error) {
output.write(chalk.red(error.message));
} else {
output.write(chalk.red(error));
}
}
}
output.write('\n');
}
| src/index.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/commands.ts",
"retrieved_chunk": " getCommands,\n async execute(commandName: string, args: string[], output: NodeJS.WriteStream) {\n const command = commands.find((cmd) => cmd.name === commandName || cmd.aliases.includes(commandName));\n if (command) {\n await command.execute(args, output, commandHandler);\n } else {\n output.write(chalk.red('Unknown command. Type /help to see the list of available commands.\\n'));\n }\n },\n };",
"score": 0.8301985263824463
},
{
"filename": "src/commands/helpCommand.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport createCommand from './command.js';\nconst helpCommand = createCommand(\n 'help',\n ['h', '?'],\n 'Show the list of available commands',\n (_args, output, commandHandler) =>\n new Promise<void>((resolve) => {\n output.write(chalk.blue('Usage:\\n'));\n output.write('Ask memorybot to write some marketing materials and press enter.\\n');",
"score": 0.828168511390686
},
{
"filename": "src/commands/resetChatCommand.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport createCommand from './command.js';\nimport { resetBufferWindowMemory, resetMemoryVectorStore, setMemoryVectorStore } from '../lib/memoryManager.js';\nconst resetChatCommand = createCommand(\n 'reset',\n [],\n 'Resets the chat and starts a new conversation - This clears the memory vector store and the buffer window memory.',\n async (_args, output) => {\n output.write(chalk.yellow('\\nResetting the chat!\\n'));\n await resetMemoryVectorStore((newMemoryVectorStore) => {",
"score": 0.8219428062438965
},
{
"filename": "src/commands/listContextStoresCommand.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport createCommand from './command.js';\nimport { listContextStores } from '../lib/contextManager.js';\nconst listContextStoresCommand = createCommand(\n 'list-context-stores',\n ['lcs'],\n `Lists all available context vector stores and their details.\\n`,\n async (args, output) => {\n if (!args || args.length > 0) {\n output.write(chalk.red('Invalid number of arguments. Usage: /list-context-stores\\n'));",
"score": 0.7870753407478333
},
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": " const dbDirectory = getConfig().currentVectorStoreDatabasePath;\n try {\n vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));\n } catch {\n spinner = ora({\n ...defaultOraOptions,\n text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),\n }).start();\n const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');\n const filesToAdd = await getDirectoryFiles(docsDirectory);",
"score": 0.7841794490814209
}
] | typescript | const question = sanitizeInput(userInput); |
import type { Request } from 'mssql';
import { type GraphQLResolveInfo } from 'graphql';
import { DevConsoleLogger, logExecutionBegin, logExecutionEnd, logSafely } from '../logging';
import { DatabaseExecutor } from '../executor';
import { ConnectionManager } from '../utils';
import {
StoredProcedureManager,
StoredProcedureCacheManager,
StoredProcedureMetadataManager,
} from '../stored-procedure';
import type { MSSQLOptions, ILogger, IResolverProcedureResult, InputParameters } from '../types';
/**
* A GraphQL DataSource backed by a Microsoft SQL Server database.
* Maintains separate caching for Query and Mutation operations.
* Maintains a global connection pool cache to reuse connections.
*/
export class MSSQLDataSource {
private readonly _queryOptions: MSSQLOptions;
private readonly _mutationOptions: MSSQLOptions;
private readonly _queryLogger: ILogger;
private readonly _mutationLogger: ILogger;
private readonly _connectionManager: ConnectionManager;
private readonly _databaseExecutor: DatabaseExecutor;
private readonly _storedProcedureMetadataManager: StoredProcedureMetadataManager;
private readonly _storedProcedureCacheManager: StoredProcedureCacheManager;
private readonly _storedProcedureManager: StoredProcedureManager;
/**
* Creates a new MSSQLDataSource with the given options.
* @param queryOptions The options for Query operations
* @param mutationOptions The options for Mutation operations
*/
constructor(
queryOptions: MSSQLOptions = MSSQLDataSource.defaultOptions,
mutationOptions: MSSQLOptions = MSSQLDataSource.defaultOptions,
) {
this._queryOptions = queryOptions;
this._mutationOptions = mutationOptions;
const defaultOptions = MSSQLDataSource.defaultOptions;
this._queryLogger =
queryOptions.logger !== undefined ? queryOptions.logger : (defaultOptions.logger as ILogger);
this._mutationLogger =
mutationOptions.logger !== undefined
? mutationOptions.logger
: (defaultOptions.logger as ILogger);
this._connectionManager = new ConnectionManager(
this._queryOptions.config,
this._mutationOptions.config,
);
this._databaseExecutor = new DatabaseExecutor(this._connectionManager);
this._storedProcedureMetadataManager = new StoredProcedureMetadataManager(
this._databaseExecutor,
);
this._storedProcedureCacheManager = new StoredProcedureCacheManager();
this._storedProcedureManager = new StoredProcedureManager(
this._storedProcedureCacheManager,
this._storedProcedureMetadataManager,
);
}
/**
* Executes a stored procedure for a Query operation with the provided input parameters, and returns the result.
* @template T - This type parameter represents the type of the value returned by the resolver procedure.
* @param {string} storedProcedureName - The name of the stored procedure to execute.
* @param {StoredProcedureInput} input - The input parameters for the stored procedure.
* @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored
* procedure results to the correct schema field names.
* @returns A Promise that resolves to the result of the stored procedure execution.
*/
public async executeStoredProcedureQuery<T>(
storedProcedureName: string,
| input: InputParameters,
info?: GraphQLResolveInfo,
): Promise<IResolverProcedureResult<T>> { |
const startTime = performance.now();
const logger = this._queryLogger;
logExecutionBegin(logger, `Stored Procedure Query ${storedProcedureName} with inputs`, input);
const result = await this._databaseExecutor.executeQueryRequest(
async (request: Request): Promise<IResolverProcedureResult<T>> =>
await this._storedProcedureManager.executeStoredProcedure<T>(
storedProcedureName,
input,
request,
logger,
info,
),
logger,
);
logExecutionEnd(logger, `Stored Procedure Query ${storedProcedureName}`, startTime);
logSafely(logger, 'info', `------------------`);
return result;
}
/**
* Executes a stored procedure for a Mutation operation with the provided input parameters, and returns the result.
* @template T - This type parameter represents the type of the value returned by the resolver procedure.
* @param {string} storedProcedureName - The name of the stored procedure to execute.
* @param {StoredProcedureInput} input - The input parameters for the stored procedure.
* @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored
* procedure results to the correct schema field names.
* @returns A Promise that resolves to the result of the stored procedure execution.
*/
public async executeStoredProcedureMutation<T>(
storedProcedureName: string,
input: InputParameters,
info?: GraphQLResolveInfo,
): Promise<IResolverProcedureResult<T>> {
const startTime = performance.now();
const logger = this._mutationLogger;
logExecutionBegin(logger, `Stored Procedure Mutation ${storedProcedureName}`, input);
const result = await this._databaseExecutor.executeMutationRequest(
async (request: Request): Promise<IResolverProcedureResult<T>> =>
await this._storedProcedureManager.executeStoredProcedure(
storedProcedureName,
input,
request,
logger,
info,
),
logger,
);
logExecutionEnd(logger, `Stored Procedure Mutation ${storedProcedureName}`, startTime);
return result;
}
/**
* Default options for the Query and Mutation global connection pool cache.
*/
private static get defaultOptions(): MSSQLOptions {
return {
config: {
user: '',
password: '',
server: '',
database: '',
},
logger: new DevConsoleLogger(),
};
}
}
| src/lib/datasource/mssql-datasource.ts | Falven-mssql-data-source-bca6621 | [
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " * procedure results to the correct schema field names.\n * @param {ILogger} logger - The logger to use for logging.\n * @returns A Promise that resolves to the result of the stored procedure execution.\n */\n public async executeStoredProcedure<T>(\n storedProcedureName: string,\n input: InputParameters,\n request: Request,\n logger: ILogger,\n info?: GraphQLResolveInfo,",
"score": 0.9598199129104614
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " result[mappedKey] = obj[key];\n }\n return result as T;\n }\n /**\n * Prepares the stored procedure result into a GraphQL result object.\n * @param {IProcedureResult} result - The stored procedure result.\n * @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored\n * procedure results to the correct schema field names.\n * @returns {IResolverProcedureResult} A prepared GraphQL result object.",
"score": 0.9344096183776855
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " /**\n * Parses the stored procedure parameter schema into a StoredProcedureParameter array.\n * @param {string} storedProcedureName - The name of the stored procedure to parse the parameter schema for.\n * @param {IResult<StoredProcedureSchema>} schemaResult - The result of the stored procedure parameter schema query.\n * @returns A StoredProcedureParameter array.\n */\n public parseStoredProcedureParameters(\n storedProcedureName: string,\n schemaResult: IResult<StoredProcedureSchema>,\n ): IterableIterator<StoredProcedureParameter> {",
"score": 0.9246118068695068
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " private readonly _storedProcedureMetadataManager: StoredProcedureMetadataManager,\n ) {}\n /**\n * Executes a stored procedure with the provided input parameters, and returns the result.\n * @template TVal - The type of records in the result set.\n * @template TRet - The type of the result object to be returned.\n * @param {string} storedProcedureName - The name of the stored procedure to execute.\n * @param {StoredProcedureInput} input - The input parameters for the stored procedure.\n * @param {Request} request - The request to execute the stored procedure.\n * @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored",
"score": 0.9211307168006897
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " private static readonly parameterDefinitionRegex = /(@[\\w]+)\\s+([^\\s]+)\\s*=\\s*([^, ]*),?/gi;\n constructor(private readonly _databaseExecutor: DatabaseExecutor) {}\n /**\n * Parses the stored procedure parameter schema into a StoredProcedureParameter array.\n * @param {string} storedProcedureName - The name of the stored procedure to retrieve the parameter schema for.\n * @returns A Promise that resolves to the result of the stored procedure execution.\n */\n public async getStoredProcedureParameterSchema(\n storedProcedureName: string,\n logger: ILogger,",
"score": 0.9079691767692566
}
] | typescript | input: InputParameters,
info?: GraphQLResolveInfo,
): Promise<IResolverProcedureResult<T>> { |
/* eslint-disable no-await-in-loop */
import dotenv from 'dotenv';
import { OpenAIChat } from 'langchain/llms/openai';
// eslint-disable-next-line import/no-unresolved
import * as readline from 'node:readline/promises';
import path from 'path';
import fs from 'fs';
/* This line of code is importing the `stdin` and `stdout` streams from the `process` module in
Node.js. These streams are used for reading input from the user and writing output to the console,
respectively. */
import { stdin as input, stdout as output } from 'node:process';
import { CallbackManager } from 'langchain/callbacks';
import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts';
import { LLMChain } from 'langchain/chains';
import { oneLine } from 'common-tags';
import chalk from 'chalk';
import logChat from './chatLogger.js';
import createCommandHandler from './commands.js';
import { getMemoryVectorStore, addDocumentsToMemoryVectorStore, getBufferWindowMemory } from './lib/memoryManager.js';
import { getContextVectorStore } from './lib/contextManager.js';
import { getRelevantContext } from './lib/vectorStoreUtils.js';
import sanitizeInput from './utils/sanitizeInput.js';
import { getConfig, getProjectRoot } from './config/index.js';
const projectRootDir = getProjectRoot();
dotenv.config();
// Set up the chat log directory
const chatLogDirectory = path.join(projectRootDir, 'chat_logs');
// Get the prompt template
const systemPromptTemplate = fs.readFileSync(path.join(projectRootDir, 'src/prompt.txt'), 'utf8');
// Set up the readline interface to read input from the user and write output to the console
const rl = readline.createInterface({ input, output });
// Set up CLI commands
const commandHandler: CommandHandler = createCommandHandler();
const callbackManager = CallbackManager.fromHandlers({
// This function is called when the LLM generates a new token (i.e., a prediction for the next word)
async handleLLMNewToken(token: string) {
// Write the token to the output stream (i.e., the console)
output.write(token);
},
});
const llm = new OpenAIChat({
streaming: true,
callbackManager,
modelName: process.env.MODEL || 'gpt-3.5-turbo',
});
const systemPrompt = SystemMessagePromptTemplate.fromTemplate(oneLine`
${systemPromptTemplate}
`);
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
systemPrompt,
HumanMessagePromptTemplate.fromTemplate('QUESTION: """{input}"""'),
]);
| const windowMemory = getBufferWindowMemory(); |
const chain = new LLMChain({
prompt: chatPrompt,
memory: windowMemory,
llm,
});
// eslint-disable-next-line no-constant-condition
while (true) {
output.write(chalk.green('\nStart chatting or type /help for a list of commands\n'));
const userInput = await rl.question('> ');
let response;
if (userInput.startsWith('/')) {
const [command, ...args] = userInput.slice(1).split(' ');
await commandHandler.execute(command, args, output);
} else {
const memoryVectorStore = await getMemoryVectorStore();
const contextVectorStore = await getContextVectorStore();
const question = sanitizeInput(userInput);
const config = getConfig();
const context = await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve);
const history = await getRelevantContext(memoryVectorStore, question, config.numMemoryDocumentsToRetrieve);
try {
response = await chain.call({
input: question,
context,
history,
immediate_history: config.useWindowMemory ? windowMemory : '',
});
if (response) {
await addDocumentsToMemoryVectorStore([
{ content: question, metadataType: 'question' },
{ content: response.text, metadataType: 'answer' },
]);
await logChat(chatLogDirectory, question, response.response);
}
} catch (error) {
if (error instanceof Error && error.message.includes('Cancel:')) {
// TODO: Handle cancel
} else if (error instanceof Error) {
output.write(chalk.red(error.message));
} else {
output.write(chalk.red(error));
}
}
}
output.write('\n');
}
| src/index.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "}\nconst bufferWindowMemory = new BufferWindowMemory({\n returnMessages: false,\n memoryKey: 'immediate_history',\n inputKey: 'input',\n k: 2,\n});\nconst memoryWrapper = {\n vectorStoreInstance: memoryVectorStore,\n};",
"score": 0.6314552426338196
},
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": " if (dir === getConfig().currentVectorStoreDatabasePath) {\n output.write(chalk.green(` (Currently selected)`));\n }\n output.write('\\n');\n files.forEach((file) => {\n output.write(chalk.yellow(` File: ${file.name}, Size: ${file.size} KB\\n`));\n });\n });\n}\nexport { getContextVectorStore, addDocument, addURL, addYouTube, listContextStores, loadOrCreateEmptyVectorStore };",
"score": 0.6184670329093933
},
{
"filename": "src/commands.ts",
"retrieved_chunk": "import toggleWindowBufferMemoryCommand from './commands/toggleWindowBufferMemoryCommand.js';\nimport listContextStoresCommand from './commands/listContextStoresCommand.js';\nfunction createCommandHandler(): CommandHandler {\n const commands: Command[] = [\n helpCommand,\n quitCommand,\n resetChatCommand,\n addDocumentCommand,\n addURLCommand,\n addYouTubeCommand,",
"score": 0.6135560870170593
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "const memoryDirectory = path.join(projectRootDir, process.env.MEMORY_VECTOR_STORE_DIR || 'memory');\nlet memoryVectorStore: HNSWLib;\ntry {\n memoryVectorStore = await HNSWLib.load(memoryDirectory, new OpenAIEmbeddings());\n} catch {\n output.write(`${chalk.blue(`Creating a new memory vector store index in the ${memoryDirectory} directory`)}\\n`);\n memoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });",
"score": 0.6084282398223877
},
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": "import ora from 'ora';\nimport { MarkdownTextSplitter, RecursiveCharacterTextSplitter } from 'langchain/text_splitter';\nimport { Document } from 'langchain/document';\nimport path from 'path';\nimport { YoutubeTranscript } from 'youtube-transcript';\nimport getDirectoryListWithDetails from '../utils/getDirectoryListWithDetails.js';\nimport createDirectory from '../utils/createDirectory.js';\nimport { getConfig, getDefaultOraOptions, getProjectRoot, setCurrentVectorStoreDatabasePath } from '../config/index.js';\nimport getDirectoryFiles from '../utils/getDirectoryFiles.js';\nimport WebCrawler from './crawler.js';",
"score": 0.6009848713874817
}
] | typescript | const windowMemory = getBufferWindowMemory(); |
import {
type ISqlTypeFactory,
type ISqlTypeFactoryWithLength,
type ISqlTypeFactoryWithNoParams,
type ISqlTypeFactoryWithPrecisionScale,
type ISqlTypeFactoryWithScale,
type ISqlTypeFactoryWithTvpType,
type ISqlTypeWithLength,
type ISqlTypeWithNoParams,
type ISqlTypeWithPrecisionScale,
type ISqlTypeWithScale,
type ISqlTypeWithTvpType,
TYPES,
MAX,
} from 'mssql';
import type { StoredProcedureParameter } from '../types';
// Maps each mssql `ISqlTypeFactory*` interface to the call signature a caller
// should use to obtain the corresponding concrete `ISqlType*` value.
// NOTE(review): conditional-type branches resolve top-to-bottom; this assumes
// the mssql factory interfaces are structurally distinguishable in this order
// (e.g. a precision/scale factory must not also match the scale branch) —
// confirm against the mssql typings.
type TypeFactory<T> = T extends ISqlTypeFactoryWithNoParams
  ? () => ISqlTypeWithNoParams
  : T extends ISqlTypeFactoryWithLength
  ? (length?: number) => ISqlTypeWithLength
  : T extends ISqlTypeFactoryWithScale
  ? (scale?: number) => ISqlTypeWithScale
  : T extends ISqlTypeFactoryWithPrecisionScale
  ? (precision?: number, scale?: number) => ISqlTypeWithPrecisionScale
  : T extends ISqlTypeFactoryWithTvpType
  ? (tvpType?: unknown) => ISqlTypeWithTvpType
  : never;
// The mssql `TYPES` export, its key union, and a view of `TYPES` in which
// every entry is typed with its precise factory call signature.
type TypesType = typeof TYPES;
type TypesKey = keyof TypesType;
type IndexableTypes = {
  [K in TypesKey]: TypeFactory<TypesType[K]>;
};
/**
 * Type guard: a factory takes no parameters when it exposes none of the
 * parameter-bearing marker keys (`length`, `scale`, `precision`, `tvpType`).
 */
function isSqlTypeFactoryWithNoParams(
  factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithNoParams {
  if (factoryObject === undefined) {
    return false;
  }
  const parameterKeys = ['length', 'scale', 'precision', 'tvpType'];
  return parameterKeys.every((key) => !(key in factoryObject));
}
/** Type guard: a length-parameterized factory is marked by a `length` key. */
function isSqlTypeFactoryWithLength(
  factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithLength {
  return factoryObject === undefined ? false : 'length' in factoryObject;
}
/** Type guard: a scale-parameterized factory is marked by a `scale` key. */
function isSqlTypeFactoryWithScale(
  factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithScale {
  return factoryObject === undefined ? false : 'scale' in factoryObject;
}
/**
 * Type guard: a precision/scale factory is marked by both a `precision`
 * and a `scale` key.
 */
function isSqlTypeFactoryWithPrecisionScale(
  factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithPrecisionScale {
  if (factoryObject === undefined) {
    return false;
  }
  return 'precision' in factoryObject && 'scale' in factoryObject;
}
/** Type guard: a TVP factory is marked by a `tvpType` key. */
function isSqlTypeFactoryWithTvpType(
  factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithTvpType {
  return factoryObject === undefined ? false : 'tvpType' in factoryObject;
}
/**
 * Finds the actual (case-preserving) key of `obj` that matches
 * `propertyName` case-insensitively, or `null` if no own enumerable
 * key matches.
 */
const findPropertyCaseInsensitive = (obj: object, propertyName: string): string | null => {
  const needle = propertyName.toLowerCase();
  const match = Object.keys(obj).find((key) => key.toLowerCase() === needle);
  return match ?? null;
};
export const mapDbTypeToDriverType = ({
type,
length,
precision,
scale,
}: Pick<StoredProcedureParameter, 'type' | 'length' | 'precision' | 'scale'>): ISqlTypeFactory => {
const types: IndexableTypes = TYPES;
const property = findPropertyCaseInsensitive(types, type);
if (property !== null) {
const typeFactory = types[property as TypesKey];
if (isSqlTypeFactoryWithNoParams(typeFactory)) {
return typeFactory();
} else if (isSqlTypeFactoryWithLength(typeFactory)) {
return ( | typeFactory as ISqlTypeFactoryWithLength)(length === -1 ? MAX : length); |
} else if (isSqlTypeFactoryWithScale(typeFactory)) {
return (typeFactory as ISqlTypeFactoryWithScale)(scale);
} else if (isSqlTypeFactoryWithPrecisionScale(typeFactory)) {
return (typeFactory as ISqlTypeFactoryWithPrecisionScale)(precision, scale);
} else if (isSqlTypeFactoryWithTvpType(typeFactory)) {
return TYPES.NVarChar();
} else {
throw new Error(`Unknown SQL Type ${type}.`);
}
}
return TYPES.NVarChar();
};
// Raw value shapes the driver may hand us for a single column.
type SqlValue = string | number | boolean | Date | Buffer;
// True when the raw value is textual or numeric.
const isStringOrNumber = (value: SqlValue): value is string | number => {
  const kind = typeof value;
  return kind === 'string' || kind === 'number';
};
// True when the raw value is already a Date instance.
const isDate = (value: SqlValue): value is Date => value instanceof Date;
// True when the SQL type name starts with any of the given prefixes.
const isType = (sqlType: string, typePrefixes: string[]): boolean =>
  typePrefixes.some((prefix) => sqlType.startsWith(prefix));
/**
 * Converts a raw SQL column value into an appropriate JavaScript value
 * based on the (case-insensitive) SQL type name. Throws for TVP, UDT,
 * and unrecognised types.
 */
export const convertSqlValueToJsValue = (value: SqlValue, sqlType: string): unknown => {
  // The literal string 'NULL' is treated as SQL NULL.
  // NOTE(review): assumes no legitimate column value is the string 'NULL' — confirm upstream.
  if (value === 'NULL') {
    return null;
  }
  const lowerCaseSqlType = sqlType.toLowerCase();
  // Character / text-like types → string.
  if (
    isType(lowerCaseSqlType, [
      'varchar',
      'nvarchar',
      'char',
      'nchar',
      'text',
      'ntext',
      'xml',
      'uniqueidentifier',
    ])
  ) {
    return String(value);
  }
  // Numeric types → number.
  // NOTE(review): Number() on a bigint column loses precision past 2^53 — confirm acceptable.
  if (
    isType(lowerCaseSqlType, [
      'int',
      'smallint',
      'tinyint',
      'bigint',
      'decimal',
      'numeric',
      'float',
      'real',
      'money',
      'smallmoney',
    ])
  ) {
    return Number(value);
  }
  // Bit → boolean.
  // NOTE(review): a string '0' coerces to true here — presumably the driver
  // delivers bit as boolean/number; verify.
  if (isType(lowerCaseSqlType, ['bit'])) {
    return Boolean(value);
  }
  // Date/time types → Date (only constructible from string, number, or Date).
  if (isType(lowerCaseSqlType, ['date', 'datetime', 'datetime2', 'smalldatetime', 'time'])) {
    if (isStringOrNumber(value) || isDate(value)) {
      return new Date(value);
    }
    throw new Error('Cannot create a Date from a boolean value.');
  }
  // Binary types → Buffer copy.
  if (isType(lowerCaseSqlType, ['binary', 'varbinary', 'image'])) {
    return Buffer.from(value as Buffer);
  }
  // Rowversion/timestamp are 8-byte binary stamps → Buffer copy.
  if (isType(lowerCaseSqlType, ['rowversion', 'timestamp'])) {
    return Buffer.from(value as Buffer);
  }
  // Spatial/hierarchy types are passed through untouched.
  if (isType(lowerCaseSqlType, ['hierarchyid', 'geometry', 'geography'])) {
    return value;
  }
  if (isType(lowerCaseSqlType, ['tvp'])) {
    throw new Error('TVPs are not supported.');
  }
  if (isType(lowerCaseSqlType, ['udt'])) {
    throw new Error('UDTs are not supported.');
  }
  throw new Error(`Unsupported SQL type: ${sqlType}`);
};
| src/lib/utils/type-map.ts | Falven-mssql-data-source-bca6621 | [
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " for (const spParameter of storedProcedureParameters) {\n const { name, type, length, precision, scale, ...rest } = spParameter;\n const parameterName = name.slice(1);\n // Let's use the parameter name in lowercase as the lookup key.\n preparedParameters.set(parameterName.toLowerCase(), {\n name: parameterName,\n type: mapDbTypeToDriverType({\n type,\n length,\n precision,",
"score": 0.8192766308784485
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " private addParametersToRequest(\n parameters: Map<string, PreparedStoredProcedureParameter>,\n request: Request,\n ): Request {\n const preparedRequest = request;\n for (const parameter of parameters.values()) {\n const { name, type, mode, value, defaultValue } = parameter;\n if (defaultValue !== undefined && value === undefined) {\n continue;\n }",
"score": 0.8081583380699158
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " ): Promise<IResult<StoredProcedureSchema>> {\n return await this._databaseExecutor.executeQueryRequest(async (request: Request) => {\n // Remove square bracket notation if any, and split into schema and name.\n const schemaAndName = storedProcedureName.replace(/\\[|\\]/g, '').split('.');\n const result = await request.query<StoredProcedureSchema>(\n 'SELECT ' +\n 'PARAMETER_NAME as name, ' +\n 'DATA_TYPE as type, ' +\n 'PARAMETER_MODE as mode, ' +\n 'CHARACTER_MAXIMUM_LENGTH length, ' +",
"score": 0.8035471439361572
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " }\n const parameterDefinition = parameterSection[8];\n let parameterDefinitionMatch;\n while (\n (parameterDefinitionMatch =\n StoredProcedureMetadataManager.parameterDefinitionRegex.exec(parameterDefinition)) !== null\n ) {\n const name = parameterDefinitionMatch[1];\n const type = parameterDefinitionMatch[2];\n const defaultValue = parameterDefinitionMatch[3];",
"score": 0.7946239709854126
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " const preparedResult = this.prepareStoredProcedureResult(result, info);\n logPerformance(logger, 'prepareStoredProcedureResult', startTime);\n return preparedResult;\n }\n private prepareParameters(\n storedProcedureParameters: IterableIterator<StoredProcedureParameter>,\n input: InputParameters,\n ): Map<string, PreparedStoredProcedureParameter> {\n // We want to use the inferred DB Stored Procedure schema as the source of truth.\n const preparedParameters = new Map<string, PreparedStoredProcedureParameter>();",
"score": 0.7798211574554443
}
] | typescript | typeFactory as ISqlTypeFactoryWithLength)(length === -1 ? MAX : length); |
import { describe, expect, it } from 'vitest';
import { TypeAnalyzer } from '.';
import { TYPE_KIND } from './constants';
describe('function', () => {
it('overloading', () => {
const analyzer = new TypeAnalyzer(`
const t = 1
function a<B extends 222>(): void;
function b<A>(o: A): string;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 48 },
text: 'function a<B extends 222>(): void;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 49, end: 77 },
text: 'function b<A>(o: A): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
}
]);
});
it('function-generic-definition - a`<B extends ...>`()', () => {
const analyzer = new TypeAnalyzer(
`
function a<B extends 111, C extends 111>() {}
const b = <B extends 222, C extends 222>() => {};
const c = function<B extends 333, C extends 333>() {}
const d = {
a<B extends 444, C extends 444>() {}
}
`
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 41 },
text: '<B extends 111, C extends 111>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 57, end: 87 },
text: '<B extends 222, C extends 222>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 115, end: 145 },
text: '<B extends 333, C extends 333>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 166, end: 196 },
text: '<B extends 444, C extends 444>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
}
]);
});
it('function-parameter - (`a: number, b: string, ...`)', () => {
const analyzer = new TypeAnalyzer(`
function a(a1: A111, a2?: A222) {}
const b = (b1: B111, b2?: B222) => {};
const c = function(c1: C111, c2?: C222) {}
const d = {
e(d1: E111, d2?: E222) {}
f: (f1: F111, f2?: F222) => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 24, end: 31 },
text: '?: A222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 49, end: 55 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 59, end: 66 },
text: '?: B222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 96, end: 102 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 106, end: 113 },
text: '?: C222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 136, end: 142 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 146, end: 153 },
text: '?: E222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 166, end: 172 },
text: ': F111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 176, end: 183 },
text: '?: F222',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
it('function-return - ()`: number`', () => {
const analyzer = new TypeAnalyzer(`n
function a(): A111 {}
const b = (): B111 => {};
const c = function(): C111 {}
const d = {
d(): D111 {}
e: (): E111 => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 36, end: 42 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 70, end: 76 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 97, end: 103 },
text: ': D111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 114, end: 120 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('function-type-predicate - (a: any)`: asserts a is ...)`', () => {
const analyzer = new TypeAnalyzer(`
function a(value): asserts a is aaa {}
const b = (value): asserts b is bbb => {};
const c = function (value): asserts d is ddd {};
const d = {
e(value): asserts e is eee {},
f: (value): asserts f is fff => {}
};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 18, end: 36 },
text: ': asserts a is aaa',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 58, end: 76 },
text: ': asserts b is bbb',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 111, end: 129 },
text: ': asserts d is ddd',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 157, end: 175 },
text: ': asserts e is eee',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 192, end: 210 },
text: ': asserts f is fff',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
}
]);
});
});
it('interface', () => {
const analyzer = new TypeAnalyzer(`
interface t {};
interface A111 {
a: number;
b: string;
c: {
e: 1
}
}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 15 },
text: 'interface t {}',
kind: TYPE_KIND.INTERFACE
},
{
range: { pos: 17, end: 81 },
text: 'interface A111 {\n a: number;\n b: string;\n c: {\n e: 1\n }\n}',
kind: TYPE_KIND.INTERFACE
}
]);
});
it('type alias', () => {
const analyzer = new TypeAnalyzer(`
type t = number;
type A111 = {
a: number;
} | 123 & {}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 17 },
text: 'type t = number;',
kind: | TYPE_KIND.TYPE_ALIAS
},
{ |
range: { pos: 18, end: 58 },
text: 'type A111 = {\n a: number;\n} | 123 & {}',
kind: TYPE_KIND.TYPE_ALIAS
}
]);
});
it('variable type definition', () => {
const analyzer = new TypeAnalyzer(`
const a = 1;
declare const b: number, c: string;
const d: number, e: string;
const eee: null | string = ''
let fff!: string = ''
using ggg: usingAny = fn();
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 49 },
text: 'declare const b: number, c: string;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 57, end: 65 },
text: ': number',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 68, end: 76 },
text: ': string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 87, end: 102 },
text: ': null | string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 115, end: 124 },
text: '!: string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 139, end: 149 },
text: ': usingAny',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
}
]);
});
it('declare statement', () => {
const analyzer = new TypeAnalyzer(`
declare const a: number;
declare function b(): number;
declare class c {}
declare module d {}
declare namespace e {}
declare enum f {}
declare global {}
declare module 'g' {}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 25 },
text: 'declare const a: number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 26, end: 55 },
text: 'declare function b(): number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 56, end: 74 },
text: 'declare class c {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 75, end: 94 },
text: 'declare module d {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 95, end: 117 },
text: 'declare namespace e {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 118, end: 135 },
text: 'declare enum f {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 136, end: 153 },
text: 'declare global {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 154, end: 175 },
text: "declare module 'g' {}",
kind: TYPE_KIND.DECLARE_STATEMENT
}
]);
});
it('as expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 as number;
const b = 1 as number | string;
const c = 1 as number | string | null as 111 as 3;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 22 },
text: ' as number',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 35, end: 54 },
text: ' as number | string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 67, end: 93 },
text: ' as number | string | null',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 93, end: 100 },
text: ' as 111',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 100, end: 105 },
text: ' as 3',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
it('satisfies expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 satisfies number;
const b = 1 satisfies number | string;
const c = 1 satisfies number | string | null;
const d = () => {
return 333 satisfies any
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 29 },
text: ' satisfies number',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 42, end: 68 },
text: ' satisfies number | string',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 81, end: 114 },
text: ' satisfies number | string | null',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 147, end: 161 },
text: ' satisfies any',
kind: TYPE_KIND.SATISFIES_OPERATOR
}
]);
});
it('satisfies & as', () => {
const analyzer = new TypeAnalyzer(`
const a = {} satisfies {} as const;
const b = {} as const satisfies {};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 13, end: 26 },
text: ' satisfies {}'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 26, end: 35 },
text: ' as const'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 49, end: 58 },
text: ' as const'
},
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 58, end: 71 },
text: ' satisfies {}'
}
]);
});
it('type assertion', () => {
const analyzer = new TypeAnalyzer(`
const a =<number>1;
const b = <number | string>1;
const c = <number | string | null>1;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 10, end: 18 },
text: '<number>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 31, end: 48 },
text: '<number | string>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 61, end: 85 },
text: '<number | string | null>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
}
]);
});
it('call expression', () => {
const analyzer = new TypeAnalyzer(`
b<number>();
new d<number, string>();
f<number, string, null>();
new Set<PersistListener<S>>()
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 2, end: 10 },
text: '<number>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 19, end: 35 },
text: '<number, string>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 40, end: 62 },
text: '<number, string, null>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { end: 93, pos: 73 },
text: '<PersistListener<S>>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
}
]);
});
describe('class', () => {
it('property type definition', () => {
const analyzer = new TypeAnalyzer(`
class A {
a: number;
public b: string;
protected c: {
e: 1
}
private d: () => void = () => {}
e!: boolean;
g?: string;
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 22 },
text: ': number',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 34, end: 42 },
text: ': string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 57, end: 73 },
text: ': {\n e: 1\n }',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 85, end: 97 },
text: ': () => void',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 112, end: 122 },
text: '!: boolean',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { end: 136, pos: 127 },
text: '?: string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
}
]);
});
it('method declaration', () => {
const analyzer = new TypeAnalyzer(`
class A {
public a(p: 1): boolean;
public a(p: 2): number;
public a(p: 1 | 2): boolean | number {
return '' as any;
}
public b(a: number): string;
protected c(b: number | 1): {
e: 1
}
protected get compileUtils(): any | 'compileUtils' {
const abc = {
getConfig: (): ReadonlyDeep<InnerCompilerConfig> => {
return getCurrentCompileConfig() as any as unknown;
},
b(): void {}
}
}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 37 },
text: ' public a(p: 1): boolean;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 38, end: 63 },
text: ' public a(p: 2): number;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 76, end: 83 },
text: ': 1 | 2',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 84, end: 102 },
text: ': boolean | number',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 118, end: 125 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 131, end: 161 },
text: ' public b(a: number): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 162, end: 206 },
text: ' protected c(b: number | 1): {\n e: 1\n }',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 237, end: 259 },
text: ": any | 'compileUtils'",
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 299, end: 334 },
text: ': ReadonlyDeep<InnerCompilerConfig>',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 380, end: 387 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 387, end: 398 },
text: ' as unknown',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 418, end: 424 },
text: ': void',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('constructor', () => {
const analyzer = new TypeAnalyzer(`
class A {
constructor(a: number) {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 26, end: 34 },
text: ': number',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
});
describe('tsx', () => {
it('generic arguments', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number> />
const b = <A<number, string> />
const c = <A<number, string, null> />
const d = <A
<number, string, null, 1, 2 | 3, [22]>
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 49, end: 65 },
text: '<number, string>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 83, end: 105 },
text: '<number, string, null>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 128, end: 166 },
text: '<number, string, null, 1, 2 | 3, [22]>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
}
]);
});
it('integration', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number>
name
test={111 as any}
t2={\`...\${11 as string}\`}
{...test as object}
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 58, end: 65 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 85, end: 95 },
text: ' as string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 113, end: 123 },
text: ' as object',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
});
| src/core/helpers/type-analyzer/index.test.ts | xlboy-ts-type-hidden-a749a29 | [
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ]);\n }\n }\n }\n private pushAnalyzedType(\n kind: AnalyzedType['kind'],\n range: [pos: number, end: number]\n ) {\n const [pos, end] = range;\n const text = this.sourceFile.text.slice(pos, end);",
"score": 0.8026598691940308
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " return this.pushAnalyzedType(TYPE_KIND.FUNCTION_CALL_GENERIC, [\n prevNode.end - 1,\n nextNode.pos + 1\n ]);\n }\n }\n // context: `<number>a`, get `<number>`\n function handleParentTypeAssertionExpr(\n this: TypeAnalyzer,\n parent: ts.TypeAssertion,",
"score": 0.7914201021194458
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " const prevNode = children[startIndex - 1];\n // >\n const nextNode = children[endIndex + 1];\n return this.pushAnalyzedType(TYPE_KIND.TSX_COMPONENT_GENERIC, [\n prevNode.end - 1,\n nextNode.pos\n ]);\n }\n }\n // [class] context: `class A { a?: number }`, get `?: number`",
"score": 0.7906031608581543
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " return this.pushAnalyzedType(TYPE_KIND.ANGLE_BRACKETS_ASSERTION, [\n prevNode.end - 1,\n nextNode.pos + 1\n ]);\n }\n // context = `a as number` | `a satisfies number`, curChild = `number`\n function handleParentAsOrSatisfiesExpr(\n this: TypeAnalyzer,\n parent: ts.AsExpression | ts.SatisfiesExpression,\n curChild: ts.Node",
"score": 0.7756657600402832
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " }\n return this.pushAnalyzedType(TYPE_KIND.FUNCTION_OVERLOAD, [\n startPos,\n parent.end\n ]);\n } else {\n return this.pushAnalyzedType(TYPE_KIND.FUNCTION_OVERLOAD, [\n parent.pos,\n parent.end\n ]);",
"score": 0.764076828956604
}
] | typescript | TYPE_KIND.TYPE_ALIAS
},
{ |
import {
type ISqlTypeFactory,
type ISqlTypeFactoryWithLength,
type ISqlTypeFactoryWithNoParams,
type ISqlTypeFactoryWithPrecisionScale,
type ISqlTypeFactoryWithScale,
type ISqlTypeFactoryWithTvpType,
type ISqlTypeWithLength,
type ISqlTypeWithNoParams,
type ISqlTypeWithPrecisionScale,
type ISqlTypeWithScale,
type ISqlTypeWithTvpType,
TYPES,
MAX,
} from 'mssql';
import type { StoredProcedureParameter } from '../types';
type TypeFactory<T> = T extends ISqlTypeFactoryWithNoParams
? () => ISqlTypeWithNoParams
: T extends ISqlTypeFactoryWithLength
? (length?: number) => ISqlTypeWithLength
: T extends ISqlTypeFactoryWithScale
? (scale?: number) => ISqlTypeWithScale
: T extends ISqlTypeFactoryWithPrecisionScale
? (precision?: number, scale?: number) => ISqlTypeWithPrecisionScale
: T extends ISqlTypeFactoryWithTvpType
? (tvpType?: unknown) => ISqlTypeWithTvpType
: never;
type TypesType = typeof TYPES;
type TypesKey = keyof TypesType;
type IndexableTypes = {
[K in TypesKey]: TypeFactory<TypesType[K]>;
};
function isSqlTypeFactoryWithNoParams(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithNoParams {
return (
factoryObject !== undefined &&
!('length' in factoryObject) &&
!('scale' in factoryObject) &&
!('precision' in factoryObject) &&
!('tvpType' in factoryObject)
);
}
function isSqlTypeFactoryWithLength(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithLength {
return factoryObject !== undefined && 'length' in factoryObject;
}
function isSqlTypeFactoryWithScale(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithScale {
return factoryObject !== undefined && 'scale' in factoryObject;
}
function isSqlTypeFactoryWithPrecisionScale(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithPrecisionScale {
return factoryObject !== undefined && 'precision' in factoryObject && 'scale' in factoryObject;
}
function isSqlTypeFactoryWithTvpType(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithTvpType {
return factoryObject !== undefined && 'tvpType' in factoryObject;
}
const findPropertyCaseInsensitive = (obj: object, propertyName: string): string | null => {
const lowercasePropertyName = propertyName.toLowerCase();
for (const key in obj) {
if (
Object.prototype.hasOwnProperty.call(obj, key) &&
key.toLowerCase() === lowercasePropertyName
) {
return key;
}
}
return null;
};
export const mapDbTypeToDriverType = ({
type,
length,
precision,
scale,
| }: Pick<StoredProcedureParameter, 'type' | 'length' | 'precision' | 'scale'>): ISqlTypeFactory => { |
const types: IndexableTypes = TYPES;
const property = findPropertyCaseInsensitive(types, type);
if (property !== null) {
const typeFactory = types[property as TypesKey];
if (isSqlTypeFactoryWithNoParams(typeFactory)) {
return typeFactory();
} else if (isSqlTypeFactoryWithLength(typeFactory)) {
return (typeFactory as ISqlTypeFactoryWithLength)(length === -1 ? MAX : length);
} else if (isSqlTypeFactoryWithScale(typeFactory)) {
return (typeFactory as ISqlTypeFactoryWithScale)(scale);
} else if (isSqlTypeFactoryWithPrecisionScale(typeFactory)) {
return (typeFactory as ISqlTypeFactoryWithPrecisionScale)(precision, scale);
} else if (isSqlTypeFactoryWithTvpType(typeFactory)) {
return TYPES.NVarChar();
} else {
throw new Error(`Unknown SQL Type ${type}.`);
}
}
return TYPES.NVarChar();
};
type SqlValue = string | number | boolean | Date | Buffer;
const isStringOrNumber = (value: SqlValue): value is string | number => {
return typeof value === 'string' || typeof value === 'number';
};
const isDate = (value: SqlValue): value is Date => {
return value instanceof Date;
};
const isType = (sqlType: string, typePrefixes: string[]): boolean => {
return typePrefixes.some((prefix) => sqlType.startsWith(prefix));
};
export const convertSqlValueToJsValue = (value: SqlValue, sqlType: string): unknown => {
if (value === 'NULL') {
return null;
}
const lowerCaseSqlType = sqlType.toLowerCase();
if (
isType(lowerCaseSqlType, [
'varchar',
'nvarchar',
'char',
'nchar',
'text',
'ntext',
'xml',
'uniqueidentifier',
])
) {
return String(value);
}
if (
isType(lowerCaseSqlType, [
'int',
'smallint',
'tinyint',
'bigint',
'decimal',
'numeric',
'float',
'real',
'money',
'smallmoney',
])
) {
return Number(value);
}
if (isType(lowerCaseSqlType, ['bit'])) {
return Boolean(value);
}
if (isType(lowerCaseSqlType, ['date', 'datetime', 'datetime2', 'smalldatetime', 'time'])) {
if (isStringOrNumber(value) || isDate(value)) {
return new Date(value);
}
throw new Error('Cannot create a Date from a boolean value.');
}
if (isType(lowerCaseSqlType, ['binary', 'varbinary', 'image'])) {
return Buffer.from(value as Buffer);
}
if (isType(lowerCaseSqlType, ['rowversion', 'timestamp'])) {
return Buffer.from(value as Buffer);
}
if (isType(lowerCaseSqlType, ['hierarchyid', 'geometry', 'geography'])) {
return value;
}
if (isType(lowerCaseSqlType, ['tvp'])) {
throw new Error('TVPs are not supported.');
}
if (isType(lowerCaseSqlType, ['udt'])) {
throw new Error('UDTs are not supported.');
}
throw new Error(`Unsupported SQL type: ${sqlType}`);
};
| src/lib/utils/type-map.ts | Falven-mssql-data-source-bca6621 | [
{
"filename": "src/lib/types/prepared-stored-procedure-parameter.ts",
"retrieved_chunk": "import type { StoredProcedureParameter, DriverType } from '.';\n/**\n * Final parameters that will be passed to the stored procedure request.\n */\nexport type PreparedStoredProcedureParameter = Omit<StoredProcedureParameter, 'type'> & {\n type: DriverType;\n value?: unknown;\n};",
"score": 0.7344692945480347
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": "import { camelCase } from 'lodash';\nimport { type Request, type IProcedureResult, type IResult, type IRecordSet } from 'mssql';\nimport { type GraphQLResolveInfo } from 'graphql';\nimport {\n type DriverType,\n type PreparedStoredProcedureParameter,\n ParameterMode,\n type StoredProcedureSchema,\n type StoredProcedureParameter,\n type ILogger,",
"score": 0.7293434143066406
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " for (const spParameter of storedProcedureParameters) {\n const { name, type, length, precision, scale, ...rest } = spParameter;\n const parameterName = name.slice(1);\n // Let's use the parameter name in lowercase as the lookup key.\n preparedParameters.set(parameterName.toLowerCase(), {\n name: parameterName,\n type: mapDbTypeToDriverType({\n type,\n length,\n precision,",
"score": 0.7271685600280762
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " ): Promise<IResult<StoredProcedureSchema>> {\n return await this._databaseExecutor.executeQueryRequest(async (request: Request) => {\n // Remove square bracket notation if any, and split into schema and name.\n const schemaAndName = storedProcedureName.replace(/\\[|\\]/g, '').split('.');\n const result = await request.query<StoredProcedureSchema>(\n 'SELECT ' +\n 'PARAMETER_NAME as name, ' +\n 'DATA_TYPE as type, ' +\n 'PARAMETER_MODE as mode, ' +\n 'CHARACTER_MAXIMUM_LENGTH length, ' +",
"score": 0.7210423946380615
},
{
"filename": "src/lib/types/i-stored-procedure-parameter.ts",
"retrieved_chunk": " */\nexport interface StoredProcedureParameter {\n name: string;\n type: string;\n mode: ParameterMode;\n defaultValue?: unknown;\n length?: number;\n precision?: number;\n scale?: number;\n}",
"score": 0.7146440744400024
}
] | typescript | }: Pick<StoredProcedureParameter, 'type' | 'length' | 'precision' | 'scale'>): ISqlTypeFactory => { |
import {
type ISqlTypeFactory,
type ISqlTypeFactoryWithLength,
type ISqlTypeFactoryWithNoParams,
type ISqlTypeFactoryWithPrecisionScale,
type ISqlTypeFactoryWithScale,
type ISqlTypeFactoryWithTvpType,
type ISqlTypeWithLength,
type ISqlTypeWithNoParams,
type ISqlTypeWithPrecisionScale,
type ISqlTypeWithScale,
type ISqlTypeWithTvpType,
TYPES,
MAX,
} from 'mssql';
import type { StoredProcedureParameter } from '../types';
// Maps an `mssql` TYPES factory interface to its concrete call signature.
// Branches are ordered from no-param factories to progressively more
// configurable ones; `never` is the fallback for unrecognized factory shapes.
type TypeFactory<T> = T extends ISqlTypeFactoryWithNoParams
  ? () => ISqlTypeWithNoParams
  : T extends ISqlTypeFactoryWithLength
  ? (length?: number) => ISqlTypeWithLength
  : T extends ISqlTypeFactoryWithScale
  ? (scale?: number) => ISqlTypeWithScale
  : T extends ISqlTypeFactoryWithPrecisionScale
  ? (precision?: number, scale?: number) => ISqlTypeWithPrecisionScale
  : T extends ISqlTypeFactoryWithTvpType
  ? (tvpType?: unknown) => ISqlTypeWithTvpType
  : never;
// The full map of driver type factories exported by `mssql`.
type TypesType = typeof TYPES;
// Union of the factory names on TYPES (e.g. 'NVarChar' | 'Int' | ...).
type TypesKey = keyof TypesType;
// TYPES re-typed so each entry carries its precise factory call signature.
type IndexableTypes = {
  [K in TypesKey]: TypeFactory<TypesType[K]>;
};
/**
 * Type guard: a factory that takes no configuration at all — none of the
 * length/scale/precision/tvpType configuration keys are present on it.
 */
function isSqlTypeFactoryWithNoParams(
  factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithNoParams {
  if (factoryObject === undefined) {
    return false;
  }
  const configKeys = ['length', 'scale', 'precision', 'tvpType'];
  return configKeys.every((key) => !(key in factoryObject));
}
/** Type guard: a factory configured via a `length` (e.g. varchar/nvarchar). */
function isSqlTypeFactoryWithLength(
  factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithLength {
  if (factoryObject === undefined) {
    return false;
  }
  return 'length' in factoryObject;
}
/** Type guard: a factory configured via a `scale` (e.g. time/datetime2). */
function isSqlTypeFactoryWithScale(
  factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithScale {
  if (factoryObject === undefined) {
    return false;
  }
  return 'scale' in factoryObject;
}
/** Type guard: a factory configured via `precision` and `scale` (e.g. decimal). */
function isSqlTypeFactoryWithPrecisionScale(
  factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithPrecisionScale {
  if (factoryObject === undefined) {
    return false;
  }
  return 'precision' in factoryObject && 'scale' in factoryObject;
}
/** Type guard: a table-valued-parameter factory (carries a `tvpType`). */
function isSqlTypeFactoryWithTvpType(
  factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithTvpType {
  if (factoryObject === undefined) {
    return false;
  }
  return 'tvpType' in factoryObject;
}
/**
 * Case-insensitively looks up a property name among `obj`'s own enumerable
 * keys. Returns the key with its original casing, or null when absent.
 */
const findPropertyCaseInsensitive = (obj: object, propertyName: string): string | null => {
  const target = propertyName.toLowerCase();
  const match = Object.keys(obj).find((key) => key.toLowerCase() === target);
  return match ?? null;
};
export const mapDbTypeToDriverType = ({
type,
length,
precision,
scale,
}: Pick<StoredProcedureParameter, 'type' | 'length' | 'precision' | 'scale'>): ISqlTypeFactory => {
const types: IndexableTypes = TYPES;
| const property = findPropertyCaseInsensitive(types, type); |
if (property !== null) {
const typeFactory = types[property as TypesKey];
if (isSqlTypeFactoryWithNoParams(typeFactory)) {
return typeFactory();
} else if (isSqlTypeFactoryWithLength(typeFactory)) {
return (typeFactory as ISqlTypeFactoryWithLength)(length === -1 ? MAX : length);
} else if (isSqlTypeFactoryWithScale(typeFactory)) {
return (typeFactory as ISqlTypeFactoryWithScale)(scale);
} else if (isSqlTypeFactoryWithPrecisionScale(typeFactory)) {
return (typeFactory as ISqlTypeFactoryWithPrecisionScale)(precision, scale);
} else if (isSqlTypeFactoryWithTvpType(typeFactory)) {
return TYPES.NVarChar();
} else {
throw new Error(`Unknown SQL Type ${type}.`);
}
}
return TYPES.NVarChar();
};
type SqlValue = string | number | boolean | Date | Buffer;

// Narrowing helpers for the SqlValue union.
const isStringOrNumber = (value: SqlValue): value is string | number => {
  return typeof value === 'string' || typeof value === 'number';
};

const isDate = (value: SqlValue): value is Date => {
  return value instanceof Date;
};

// True when the (already lower-cased) SQL type name starts with any prefix.
const isType = (sqlType: string, typePrefixes: string[]): boolean => {
  return typePrefixes.some((prefix) => sqlType.startsWith(prefix));
};

/**
 * Converts a raw value returned by SQL Server into the corresponding
 * JavaScript value, dispatching on the column's SQL type name.
 *
 * @param value - The raw driver value (or the literal string 'NULL').
 * @param sqlType - The SQL type name, matched case-insensitively by prefix.
 * @returns The converted value (string, number, boolean, Date, Buffer, or null).
 * @throws Error for TVP/UDT types and for unsupported type names.
 */
export const convertSqlValueToJsValue = (value: SqlValue, sqlType: string): unknown => {
  if (value === 'NULL') {
    return null;
  }

  const lowerCaseSqlType = sqlType.toLowerCase();

  // BUGFIX: check rowversion/timestamp BEFORE the date/time family below.
  // 'timestamp' starts with the 'time' prefix, so the previous ordering sent
  // rowversion Buffers into the Date branch, which then threw.
  if (isType(lowerCaseSqlType, ['rowversion', 'timestamp'])) {
    return Buffer.from(value as Buffer);
  }

  if (
    isType(lowerCaseSqlType, [
      'varchar',
      'nvarchar',
      'char',
      'nchar',
      'text',
      'ntext',
      'xml',
      'uniqueidentifier',
    ])
  ) {
    return String(value);
  }

  if (
    isType(lowerCaseSqlType, [
      'int',
      'smallint',
      'tinyint',
      'bigint',
      'decimal',
      'numeric',
      'float',
      'real',
      'money',
      'smallmoney',
    ])
  ) {
    return Number(value);
  }

  if (isType(lowerCaseSqlType, ['bit'])) {
    return Boolean(value);
  }

  if (isType(lowerCaseSqlType, ['date', 'datetime', 'datetime2', 'smalldatetime', 'time'])) {
    if (isStringOrNumber(value) || isDate(value)) {
      return new Date(value);
    }
    throw new Error('Cannot create a Date from a boolean value.');
  }

  if (isType(lowerCaseSqlType, ['binary', 'varbinary', 'image'])) {
    return Buffer.from(value as Buffer);
  }

  if (isType(lowerCaseSqlType, ['hierarchyid', 'geometry', 'geography'])) {
    // Opaque spatial/hierarchy payloads are passed through unchanged.
    return value;
  }

  if (isType(lowerCaseSqlType, ['tvp'])) {
    throw new Error('TVPs are not supported.');
  }

  if (isType(lowerCaseSqlType, ['udt'])) {
    throw new Error('UDTs are not supported.');
  }

  throw new Error(`Unsupported SQL type: ${sqlType}`);
};
| src/lib/utils/type-map.ts | Falven-mssql-data-source-bca6621 | [
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " for (const spParameter of storedProcedureParameters) {\n const { name, type, length, precision, scale, ...rest } = spParameter;\n const parameterName = name.slice(1);\n // Let's use the parameter name in lowercase as the lookup key.\n preparedParameters.set(parameterName.toLowerCase(), {\n name: parameterName,\n type: mapDbTypeToDriverType({\n type,\n length,\n precision,",
"score": 0.811016321182251
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " ): Promise<IResult<StoredProcedureSchema>> {\n return await this._databaseExecutor.executeQueryRequest(async (request: Request) => {\n // Remove square bracket notation if any, and split into schema and name.\n const schemaAndName = storedProcedureName.replace(/\\[|\\]/g, '').split('.');\n const result = await request.query<StoredProcedureSchema>(\n 'SELECT ' +\n 'PARAMETER_NAME as name, ' +\n 'DATA_TYPE as type, ' +\n 'PARAMETER_MODE as mode, ' +\n 'CHARACTER_MAXIMUM_LENGTH length, ' +",
"score": 0.7902240753173828
},
{
"filename": "src/lib/types/prepared-stored-procedure-parameter.ts",
"retrieved_chunk": "import type { StoredProcedureParameter, DriverType } from '.';\n/**\n * Final parameters that will be passed to the stored procedure request.\n */\nexport type PreparedStoredProcedureParameter = Omit<StoredProcedureParameter, 'type'> & {\n type: DriverType;\n value?: unknown;\n};",
"score": 0.7835551500320435
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": "import { camelCase } from 'lodash';\nimport { type Request, type IProcedureResult, type IResult, type IRecordSet } from 'mssql';\nimport { type GraphQLResolveInfo } from 'graphql';\nimport {\n type DriverType,\n type PreparedStoredProcedureParameter,\n ParameterMode,\n type StoredProcedureSchema,\n type StoredProcedureParameter,\n type ILogger,",
"score": 0.7726752758026123
},
{
"filename": "src/lib/types/driver-type.ts",
"retrieved_chunk": "import type { ISqlType } from 'mssql';\n/**\n * Driver types that can be used to specify the type of a stored procedure parameter.\n */\nexport type DriverType = (() => ISqlType) | ISqlType;",
"score": 0.7613694667816162
}
] | typescript | const property = findPropertyCaseInsensitive(types, type); |
import { describe, expect, it } from 'vitest';
import { TypeAnalyzer } from '.';
import { TYPE_KIND } from './constants';
describe('function', () => {
it('overloading', () => {
const analyzer = new TypeAnalyzer(`
const t = 1
function a<B extends 222>(): void;
function b<A>(o: A): string;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 48 },
text: 'function a<B extends 222>(): void;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 49, end: 77 },
text: 'function b<A>(o: A): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
}
]);
});
it('function-generic-definition - a`<B extends ...>`()', () => {
const analyzer = new TypeAnalyzer(
`
function a<B extends 111, C extends 111>() {}
const b = <B extends 222, C extends 222>() => {};
const c = function<B extends 333, C extends 333>() {}
const d = {
a<B extends 444, C extends 444>() {}
}
`
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 41 },
text: '<B extends 111, C extends 111>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 57, end: 87 },
text: '<B extends 222, C extends 222>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 115, end: 145 },
text: '<B extends 333, C extends 333>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 166, end: 196 },
text: '<B extends 444, C extends 444>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
}
]);
});
it('function-parameter - (`a: number, b: string, ...`)', () => {
const analyzer = new TypeAnalyzer(`
function a(a1: A111, a2?: A222) {}
const b = (b1: B111, b2?: B222) => {};
const c = function(c1: C111, c2?: C222) {}
const d = {
e(d1: E111, d2?: E222) {}
f: (f1: F111, f2?: F222) => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 24, end: 31 },
text: '?: A222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 49, end: 55 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 59, end: 66 },
text: '?: B222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 96, end: 102 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 106, end: 113 },
text: '?: C222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 136, end: 142 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 146, end: 153 },
text: '?: E222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 166, end: 172 },
text: ': F111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 176, end: 183 },
text: '?: F222',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
it('function-return - ()`: number`', () => {
const analyzer = new TypeAnalyzer(`n
function a(): A111 {}
const b = (): B111 => {};
const c = function(): C111 {}
const d = {
d(): D111 {}
e: (): E111 => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 36, end: 42 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 70, end: 76 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 97, end: 103 },
text: ': D111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 114, end: 120 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('function-type-predicate - (a: any)`: asserts a is ...)`', () => {
const analyzer = new TypeAnalyzer(`
function a(value): asserts a is aaa {}
const b = (value): asserts b is bbb => {};
const c = function (value): asserts d is ddd {};
const d = {
e(value): asserts e is eee {},
f: (value): asserts f is fff => {}
};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 18, end: 36 },
text: ': asserts a is aaa',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 58, end: 76 },
text: ': asserts b is bbb',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 111, end: 129 },
text: ': asserts d is ddd',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 157, end: 175 },
text: ': asserts e is eee',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 192, end: 210 },
text: ': asserts f is fff',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
}
]);
});
});
it('interface', () => {
const analyzer = new TypeAnalyzer(`
interface t {};
interface A111 {
a: number;
b: string;
c: {
e: 1
}
}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 15 },
text: 'interface t {}',
kind: TYPE_KIND.INTERFACE
},
{
range: { pos: 17, end: 81 },
text: 'interface A111 {\n a: number;\n b: string;\n c: {\n e: 1\n }\n}',
kind: TYPE_KIND.INTERFACE
}
]);
});
it('type alias', () => {
const analyzer = new TypeAnalyzer(`
type t = number;
type A111 = {
a: number;
} | 123 & {}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 17 },
text: 'type t = number;',
kind: TYPE_KIND.TYPE_ALIAS
},
{
range: { pos: 18, end: 58 },
text: 'type A111 = {\n a: number;\n} | 123 & {}',
kind: TYPE_KIND.TYPE_ALIAS
}
]);
});
it('variable type definition', () => {
const analyzer = new TypeAnalyzer(`
const a = 1;
declare const b: number, c: string;
const d: number, e: string;
const eee: null | string = ''
let fff!: string = ''
using ggg: usingAny = fn();
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 49 },
text: 'declare const b: number, c: string;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 57, end: 65 },
text: ': number',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 68, end: 76 },
text: ': string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 87, end: 102 },
text: ': null | string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 115, end: 124 },
text: '!: string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 139, end: 149 },
text: ': usingAny',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
}
]);
});
it('declare statement', () => {
const analyzer = new TypeAnalyzer(`
declare const a: number;
declare function b(): number;
declare class c {}
declare module d {}
declare namespace e {}
declare enum f {}
declare global {}
declare module 'g' {}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 25 },
text: 'declare const a: number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 26, end: 55 },
text: 'declare function b(): number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 56, end: 74 },
text: 'declare class c {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 75, end: 94 },
text: 'declare module d {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 95, end: 117 },
text: 'declare namespace e {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 118, end: 135 },
text: 'declare enum f {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 136, end: 153 },
text: 'declare global {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 154, end: 175 },
text: "declare module 'g' {}",
kind: TYPE_KIND.DECLARE_STATEMENT
}
]);
});
it('as expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 as number;
const b = 1 as number | string;
const c = 1 as number | string | null as 111 as 3;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 22 },
text: ' as number',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 35, end: 54 },
text: ' as number | string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 67, end: 93 },
text: ' as number | string | null',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 93, end: 100 },
text: ' as 111',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 100, end: 105 },
text: ' as 3',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
it('satisfies expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 satisfies number;
const b = 1 satisfies number | string;
const c = 1 satisfies number | string | null;
const d = () => {
return 333 satisfies any
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 29 },
text: ' satisfies number',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 42, end: 68 },
text: ' satisfies number | string',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 81, end: 114 },
text: ' satisfies number | string | null',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 147, end: 161 },
text: ' satisfies any',
kind: TYPE_KIND.SATISFIES_OPERATOR
}
]);
});
it('satisfies & as', () => {
const analyzer = new TypeAnalyzer(`
const a = {} satisfies {} as const;
const b = {} as const satisfies {};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 13, end: 26 },
text: ' satisfies {}'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 26, end: 35 },
text: ' as const'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 49, end: 58 },
text: ' as const'
},
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 58, end: 71 },
text: ' satisfies {}'
}
]);
});
it('type assertion', () => {
const analyzer = new TypeAnalyzer(`
const a =<number>1;
const b = <number | string>1;
const c = <number | string | null>1;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 10, end: 18 },
text: '<number>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 31, end: 48 },
text: '<number | string>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 61, end: 85 },
text: '<number | string | null>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
}
]);
});
it('call expression', () => {
const analyzer = new TypeAnalyzer(`
b<number>();
new d<number, string>();
f<number, string, null>();
new Set<PersistListener<S>>()
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 2, end: 10 },
text: '<number>',
| kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{ |
range: { pos: 19, end: 35 },
text: '<number, string>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 40, end: 62 },
text: '<number, string, null>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { end: 93, pos: 73 },
text: '<PersistListener<S>>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
}
]);
});
describe('class', () => {
it('property type definition', () => {
const analyzer = new TypeAnalyzer(`
class A {
a: number;
public b: string;
protected c: {
e: 1
}
private d: () => void = () => {}
e!: boolean;
g?: string;
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 22 },
text: ': number',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 34, end: 42 },
text: ': string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 57, end: 73 },
text: ': {\n e: 1\n }',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 85, end: 97 },
text: ': () => void',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 112, end: 122 },
text: '!: boolean',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { end: 136, pos: 127 },
text: '?: string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
}
]);
});
it('method declaration', () => {
const analyzer = new TypeAnalyzer(`
class A {
public a(p: 1): boolean;
public a(p: 2): number;
public a(p: 1 | 2): boolean | number {
return '' as any;
}
public b(a: number): string;
protected c(b: number | 1): {
e: 1
}
protected get compileUtils(): any | 'compileUtils' {
const abc = {
getConfig: (): ReadonlyDeep<InnerCompilerConfig> => {
return getCurrentCompileConfig() as any as unknown;
},
b(): void {}
}
}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 37 },
text: ' public a(p: 1): boolean;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 38, end: 63 },
text: ' public a(p: 2): number;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 76, end: 83 },
text: ': 1 | 2',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 84, end: 102 },
text: ': boolean | number',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 118, end: 125 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 131, end: 161 },
text: ' public b(a: number): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 162, end: 206 },
text: ' protected c(b: number | 1): {\n e: 1\n }',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 237, end: 259 },
text: ": any | 'compileUtils'",
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 299, end: 334 },
text: ': ReadonlyDeep<InnerCompilerConfig>',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 380, end: 387 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 387, end: 398 },
text: ' as unknown',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 418, end: 424 },
text: ': void',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('constructor', () => {
const analyzer = new TypeAnalyzer(`
class A {
constructor(a: number) {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 26, end: 34 },
text: ': number',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
});
describe('tsx', () => {
it('generic arguments', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number> />
const b = <A<number, string> />
const c = <A<number, string, null> />
const d = <A
<number, string, null, 1, 2 | 3, [22]>
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 49, end: 65 },
text: '<number, string>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 83, end: 105 },
text: '<number, string, null>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 128, end: 166 },
text: '<number, string, null, 1, 2 | 3, [22]>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
}
]);
});
it('integration', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number>
name
test={111 as any}
t2={\`...\${11 as string}\`}
{...test as object}
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 58, end: 65 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 85, end: 95 },
text: ' as string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 113, end: 123 },
text: ' as object',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
});
| src/core/helpers/type-analyzer/index.test.ts | xlboy-ts-type-hidden-a749a29 | [
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ]);\n }\n }\n }\n private pushAnalyzedType(\n kind: AnalyzedType['kind'],\n range: [pos: number, end: number]\n ) {\n const [pos, end] = range;\n const text = this.sourceFile.text.slice(pos, end);",
"score": 0.8152363300323486
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " return this.pushAnalyzedType(TYPE_KIND.FUNCTION_CALL_GENERIC, [\n prevNode.end - 1,\n nextNode.pos + 1\n ]);\n }\n }\n // context: `<number>a`, get `<number>`\n function handleParentTypeAssertionExpr(\n this: TypeAnalyzer,\n parent: ts.TypeAssertion,",
"score": 0.8137384653091431
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " const prevNode = children[startIndex - 1];\n // >\n const nextNode = children[endIndex + 1];\n return this.pushAnalyzedType(TYPE_KIND.TSX_COMPONENT_GENERIC, [\n prevNode.end - 1,\n nextNode.pos\n ]);\n }\n }\n // [class] context: `class A { a?: number }`, get `?: number`",
"score": 0.8058413863182068
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " }\n return this.pushAnalyzedType(TYPE_KIND.FUNCTION_OVERLOAD, [\n startPos,\n parent.end\n ]);\n } else {\n return this.pushAnalyzedType(TYPE_KIND.FUNCTION_OVERLOAD, [\n parent.pos,\n parent.end\n ]);",
"score": 0.793312132358551
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " return this.pushAnalyzedType(TYPE_KIND.ANGLE_BRACKETS_ASSERTION, [\n prevNode.end - 1,\n nextNode.pos + 1\n ]);\n }\n // context = `a as number` | `a satisfies number`, curChild = `number`\n function handleParentAsOrSatisfiesExpr(\n this: TypeAnalyzer,\n parent: ts.AsExpression | ts.SatisfiesExpression,\n curChild: ts.Node",
"score": 0.7837023735046387
}
] | typescript | kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{ |
import { describe, expect, it } from 'vitest';
import { TypeAnalyzer } from '.';
import { TYPE_KIND } from './constants';
describe('function', () => {
it('overloading', () => {
const analyzer = new TypeAnalyzer(`
const t = 1
function a<B extends 222>(): void;
function b<A>(o: A): string;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 48 },
text: 'function a<B extends 222>(): void;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 49, end: 77 },
text: 'function b<A>(o: A): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
}
]);
});
it('function-generic-definition - a`<B extends ...>`()', () => {
const analyzer = new TypeAnalyzer(
`
function a<B extends 111, C extends 111>() {}
const b = <B extends 222, C extends 222>() => {};
const c = function<B extends 333, C extends 333>() {}
const d = {
a<B extends 444, C extends 444>() {}
}
`
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 41 },
text: '<B extends 111, C extends 111>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 57, end: 87 },
text: '<B extends 222, C extends 222>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 115, end: 145 },
text: '<B extends 333, C extends 333>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 166, end: 196 },
text: '<B extends 444, C extends 444>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
}
]);
});
it('function-parameter - (`a: number, b: string, ...`)', () => {
const analyzer = new TypeAnalyzer(`
function a(a1: A111, a2?: A222) {}
const b = (b1: B111, b2?: B222) => {};
const c = function(c1: C111, c2?: C222) {}
const d = {
e(d1: E111, d2?: E222) {}
f: (f1: F111, f2?: F222) => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 24, end: 31 },
text: '?: A222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 49, end: 55 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 59, end: 66 },
text: '?: B222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 96, end: 102 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 106, end: 113 },
text: '?: C222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 136, end: 142 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 146, end: 153 },
text: '?: E222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 166, end: 172 },
text: ': F111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 176, end: 183 },
text: '?: F222',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
it('function-return - ()`: number`', () => {
const analyzer = new TypeAnalyzer(`n
function a(): A111 {}
const b = (): B111 => {};
const c = function(): C111 {}
const d = {
d(): D111 {}
e: (): E111 => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 36, end: 42 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 70, end: 76 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 97, end: 103 },
text: ': D111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 114, end: 120 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('function-type-predicate - (a: any)`: asserts a is ...)`', () => {
const analyzer = new TypeAnalyzer(`
function a(value): asserts a is aaa {}
const b = (value): asserts b is bbb => {};
const c = function (value): asserts d is ddd {};
const d = {
e(value): asserts e is eee {},
f: (value): asserts f is fff => {}
};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 18, end: 36 },
text: ': asserts a is aaa',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 58, end: 76 },
text: ': asserts b is bbb',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 111, end: 129 },
text: ': asserts d is ddd',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 157, end: 175 },
text: ': asserts e is eee',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 192, end: 210 },
text: ': asserts f is fff',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
}
]);
});
});
it('interface', () => {
const analyzer = new TypeAnalyzer(`
interface t {};
interface A111 {
a: number;
b: string;
c: {
e: 1
}
}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 15 },
text: 'interface t {}',
kind: TYPE_KIND.INTERFACE
},
{
range: { pos: 17, end: 81 },
text: 'interface A111 {\n a: number;\n b: string;\n c: {\n e: 1\n }\n}',
kind: TYPE_KIND.INTERFACE
}
]);
});
it('type alias', () => {
const analyzer = new TypeAnalyzer(`
type t = number;
type A111 = {
a: number;
} | 123 & {}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 17 },
text: 'type t = number;',
kind: TYPE_KIND.TYPE_ALIAS
},
{
range: { pos: 18, end: 58 },
text: 'type A111 = {\n a: number;\n} | 123 & {}',
kind: TYPE_KIND.TYPE_ALIAS
}
]);
});
it('variable type definition', () => {
const analyzer = new TypeAnalyzer(`
const a = 1;
declare const b: number, c: string;
const d: number, e: string;
const eee: null | string = ''
let fff!: string = ''
using ggg: usingAny = fn();
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 49 },
text: 'declare const b: number, c: string;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 57, end: 65 },
text: ': number',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 68, end: 76 },
text: ': string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 87, end: 102 },
text: ': null | string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 115, end: 124 },
text: '!: string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 139, end: 149 },
text: ': usingAny',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
}
]);
});
it('declare statement', () => {
const analyzer = new TypeAnalyzer(`
declare const a: number;
declare function b(): number;
declare class c {}
declare module d {}
declare namespace e {}
declare enum f {}
declare global {}
declare module 'g' {}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 25 },
text: 'declare const a: number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 26, end: 55 },
text: 'declare function b(): number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 56, end: 74 },
text: 'declare class c {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 75, end: 94 },
text: 'declare module d {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 95, end: 117 },
text: 'declare namespace e {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 118, end: 135 },
text: 'declare enum f {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 136, end: 153 },
text: 'declare global {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 154, end: 175 },
text: "declare module 'g' {}",
kind: TYPE_KIND.DECLARE_STATEMENT
}
]);
});
it('as expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 as number;
const b = 1 as number | string;
const c = 1 as number | string | null as 111 as 3;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 22 },
text: ' as number',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 35, end: 54 },
text: ' as number | string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 67, end: 93 },
text: ' as number | string | null',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 93, end: 100 },
text: ' as 111',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 100, end: 105 },
text: ' as 3',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
it('satisfies expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 satisfies number;
const b = 1 satisfies number | string;
const c = 1 satisfies number | string | null;
const d = () => {
return 333 satisfies any
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 29 },
text: ' satisfies number',
kind: | TYPE_KIND.SATISFIES_OPERATOR
},
{ |
range: { pos: 42, end: 68 },
text: ' satisfies number | string',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 81, end: 114 },
text: ' satisfies number | string | null',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 147, end: 161 },
text: ' satisfies any',
kind: TYPE_KIND.SATISFIES_OPERATOR
}
]);
});
it('satisfies & as', () => {
const analyzer = new TypeAnalyzer(`
const a = {} satisfies {} as const;
const b = {} as const satisfies {};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 13, end: 26 },
text: ' satisfies {}'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 26, end: 35 },
text: ' as const'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 49, end: 58 },
text: ' as const'
},
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 58, end: 71 },
text: ' satisfies {}'
}
]);
});
it('type assertion', () => {
const analyzer = new TypeAnalyzer(`
const a =<number>1;
const b = <number | string>1;
const c = <number | string | null>1;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 10, end: 18 },
text: '<number>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 31, end: 48 },
text: '<number | string>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 61, end: 85 },
text: '<number | string | null>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
}
]);
});
it('call expression', () => {
const analyzer = new TypeAnalyzer(`
b<number>();
new d<number, string>();
f<number, string, null>();
new Set<PersistListener<S>>()
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 2, end: 10 },
text: '<number>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 19, end: 35 },
text: '<number, string>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 40, end: 62 },
text: '<number, string, null>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { end: 93, pos: 73 },
text: '<PersistListener<S>>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
}
]);
});
describe('class', () => {
it('property type definition', () => {
const analyzer = new TypeAnalyzer(`
class A {
a: number;
public b: string;
protected c: {
e: 1
}
private d: () => void = () => {}
e!: boolean;
g?: string;
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 22 },
text: ': number',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 34, end: 42 },
text: ': string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 57, end: 73 },
text: ': {\n e: 1\n }',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 85, end: 97 },
text: ': () => void',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 112, end: 122 },
text: '!: boolean',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { end: 136, pos: 127 },
text: '?: string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
}
]);
});
it('method declaration', () => {
const analyzer = new TypeAnalyzer(`
class A {
public a(p: 1): boolean;
public a(p: 2): number;
public a(p: 1 | 2): boolean | number {
return '' as any;
}
public b(a: number): string;
protected c(b: number | 1): {
e: 1
}
protected get compileUtils(): any | 'compileUtils' {
const abc = {
getConfig: (): ReadonlyDeep<InnerCompilerConfig> => {
return getCurrentCompileConfig() as any as unknown;
},
b(): void {}
}
}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 37 },
text: ' public a(p: 1): boolean;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 38, end: 63 },
text: ' public a(p: 2): number;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 76, end: 83 },
text: ': 1 | 2',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 84, end: 102 },
text: ': boolean | number',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 118, end: 125 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 131, end: 161 },
text: ' public b(a: number): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 162, end: 206 },
text: ' protected c(b: number | 1): {\n e: 1\n }',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 237, end: 259 },
text: ": any | 'compileUtils'",
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 299, end: 334 },
text: ': ReadonlyDeep<InnerCompilerConfig>',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 380, end: 387 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 387, end: 398 },
text: ' as unknown',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 418, end: 424 },
text: ': void',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('constructor', () => {
const analyzer = new TypeAnalyzer(`
class A {
constructor(a: number) {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 26, end: 34 },
text: ': number',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
});
describe('tsx', () => {
it('generic arguments', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number> />
const b = <A<number, string> />
const c = <A<number, string, null> />
const d = <A
<number, string, null, 1, 2 | 3, [22]>
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 49, end: 65 },
text: '<number, string>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 83, end: 105 },
text: '<number, string, null>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 128, end: 166 },
text: '<number, string, null, 1, 2 | 3, [22]>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
}
]);
});
it('integration', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number>
name
test={111 as any}
t2={\`...\${11 as string}\`}
{...test as object}
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 58, end: 65 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 85, end: 95 },
text: ' as string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 113, end: 123 },
text: ' as object',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
});
| src/core/helpers/type-analyzer/index.test.ts | xlboy-ts-type-hidden-a749a29 | [
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ]);\n }\n }\n }\n private pushAnalyzedType(\n kind: AnalyzedType['kind'],\n range: [pos: number, end: number]\n ) {\n const [pos, end] = range;\n const text = this.sourceFile.text.slice(pos, end);",
"score": 0.7689240574836731
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " return this.pushAnalyzedType(TYPE_KIND.ANGLE_BRACKETS_ASSERTION, [\n prevNode.end - 1,\n nextNode.pos + 1\n ]);\n }\n // context = `a as number` | `a satisfies number`, curChild = `number`\n function handleParentAsOrSatisfiesExpr(\n this: TypeAnalyzer,\n parent: ts.AsExpression | ts.SatisfiesExpression,\n curChild: ts.Node",
"score": 0.7674592137336731
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " const prevNode = children[startIndex - 1];\n // >\n const nextNode = children[endIndex + 1];\n return this.pushAnalyzedType(TYPE_KIND.TSX_COMPONENT_GENERIC, [\n prevNode.end - 1,\n nextNode.pos\n ]);\n }\n }\n // [class] context: `class A { a?: number }`, get `?: number`",
"score": 0.7571539282798767
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " return this.pushAnalyzedType(TYPE_KIND.FUNCTION_CALL_GENERIC, [\n prevNode.end - 1,\n nextNode.pos + 1\n ]);\n }\n }\n // context: `<number>a`, get `<number>`\n function handleParentTypeAssertionExpr(\n this: TypeAnalyzer,\n parent: ts.TypeAssertion,",
"score": 0.7550082206726074
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " }\n return this.pushAnalyzedType(TYPE_KIND.FUNCTION_OVERLOAD, [\n startPos,\n parent.end\n ]);\n } else {\n return this.pushAnalyzedType(TYPE_KIND.FUNCTION_OVERLOAD, [\n parent.pos,\n parent.end\n ]);",
"score": 0.7453099489212036
}
] | typescript | TYPE_KIND.SATISFIES_OPERATOR
},
{ |
import { describe, expect, it } from 'vitest';
import { TypeAnalyzer } from '.';
import { TYPE_KIND } from './constants';
describe('function', () => {
it('overloading', () => {
const analyzer = new TypeAnalyzer(`
const t = 1
function a<B extends 222>(): void;
function b<A>(o: A): string;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 48 },
text: 'function a<B extends 222>(): void;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 49, end: 77 },
text: 'function b<A>(o: A): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
}
]);
});
it('function-generic-definition - a`<B extends ...>`()', () => {
const analyzer = new TypeAnalyzer(
`
function a<B extends 111, C extends 111>() {}
const b = <B extends 222, C extends 222>() => {};
const c = function<B extends 333, C extends 333>() {}
const d = {
a<B extends 444, C extends 444>() {}
}
`
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 41 },
text: '<B extends 111, C extends 111>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 57, end: 87 },
text: '<B extends 222, C extends 222>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 115, end: 145 },
text: '<B extends 333, C extends 333>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 166, end: 196 },
text: '<B extends 444, C extends 444>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
}
]);
});
it('function-parameter - (`a: number, b: string, ...`)', () => {
const analyzer = new TypeAnalyzer(`
function a(a1: A111, a2?: A222) {}
const b = (b1: B111, b2?: B222) => {};
const c = function(c1: C111, c2?: C222) {}
const d = {
e(d1: E111, d2?: E222) {}
f: (f1: F111, f2?: F222) => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 24, end: 31 },
text: '?: A222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 49, end: 55 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 59, end: 66 },
text: '?: B222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 96, end: 102 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 106, end: 113 },
text: '?: C222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 136, end: 142 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 146, end: 153 },
text: '?: E222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 166, end: 172 },
text: ': F111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 176, end: 183 },
text: '?: F222',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
it('function-return - ()`: number`', () => {
const analyzer = new TypeAnalyzer(`n
function a(): A111 {}
const b = (): B111 => {};
const c = function(): C111 {}
const d = {
d(): D111 {}
e: (): E111 => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 36, end: 42 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 70, end: 76 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 97, end: 103 },
text: ': D111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 114, end: 120 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('function-type-predicate - (a: any)`: asserts a is ...)`', () => {
const analyzer = new TypeAnalyzer(`
function a(value): asserts a is aaa {}
const b = (value): asserts b is bbb => {};
const c = function (value): asserts d is ddd {};
const d = {
e(value): asserts e is eee {},
f: (value): asserts f is fff => {}
};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 18, end: 36 },
text: ': asserts a is aaa',
kind: | TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{ |
range: { pos: 58, end: 76 },
text: ': asserts b is bbb',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 111, end: 129 },
text: ': asserts d is ddd',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 157, end: 175 },
text: ': asserts e is eee',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 192, end: 210 },
text: ': asserts f is fff',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
}
]);
});
});
it('interface', () => {
const analyzer = new TypeAnalyzer(`
interface t {};
interface A111 {
a: number;
b: string;
c: {
e: 1
}
}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 15 },
text: 'interface t {}',
kind: TYPE_KIND.INTERFACE
},
{
range: { pos: 17, end: 81 },
text: 'interface A111 {\n a: number;\n b: string;\n c: {\n e: 1\n }\n}',
kind: TYPE_KIND.INTERFACE
}
]);
});
it('type alias', () => {
const analyzer = new TypeAnalyzer(`
type t = number;
type A111 = {
a: number;
} | 123 & {}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 17 },
text: 'type t = number;',
kind: TYPE_KIND.TYPE_ALIAS
},
{
range: { pos: 18, end: 58 },
text: 'type A111 = {\n a: number;\n} | 123 & {}',
kind: TYPE_KIND.TYPE_ALIAS
}
]);
});
it('variable type definition', () => {
const analyzer = new TypeAnalyzer(`
const a = 1;
declare const b: number, c: string;
const d: number, e: string;
const eee: null | string = ''
let fff!: string = ''
using ggg: usingAny = fn();
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 49 },
text: 'declare const b: number, c: string;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 57, end: 65 },
text: ': number',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 68, end: 76 },
text: ': string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 87, end: 102 },
text: ': null | string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 115, end: 124 },
text: '!: string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 139, end: 149 },
text: ': usingAny',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
}
]);
});
it('declare statement', () => {
const analyzer = new TypeAnalyzer(`
declare const a: number;
declare function b(): number;
declare class c {}
declare module d {}
declare namespace e {}
declare enum f {}
declare global {}
declare module 'g' {}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 25 },
text: 'declare const a: number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 26, end: 55 },
text: 'declare function b(): number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 56, end: 74 },
text: 'declare class c {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 75, end: 94 },
text: 'declare module d {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 95, end: 117 },
text: 'declare namespace e {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 118, end: 135 },
text: 'declare enum f {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 136, end: 153 },
text: 'declare global {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 154, end: 175 },
text: "declare module 'g' {}",
kind: TYPE_KIND.DECLARE_STATEMENT
}
]);
});
it('as expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 as number;
const b = 1 as number | string;
const c = 1 as number | string | null as 111 as 3;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 22 },
text: ' as number',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 35, end: 54 },
text: ' as number | string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 67, end: 93 },
text: ' as number | string | null',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 93, end: 100 },
text: ' as 111',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 100, end: 105 },
text: ' as 3',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
it('satisfies expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 satisfies number;
const b = 1 satisfies number | string;
const c = 1 satisfies number | string | null;
const d = () => {
return 333 satisfies any
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 29 },
text: ' satisfies number',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 42, end: 68 },
text: ' satisfies number | string',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 81, end: 114 },
text: ' satisfies number | string | null',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 147, end: 161 },
text: ' satisfies any',
kind: TYPE_KIND.SATISFIES_OPERATOR
}
]);
});
it('satisfies & as', () => {
const analyzer = new TypeAnalyzer(`
const a = {} satisfies {} as const;
const b = {} as const satisfies {};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 13, end: 26 },
text: ' satisfies {}'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 26, end: 35 },
text: ' as const'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 49, end: 58 },
text: ' as const'
},
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 58, end: 71 },
text: ' satisfies {}'
}
]);
});
it('type assertion', () => {
const analyzer = new TypeAnalyzer(`
const a =<number>1;
const b = <number | string>1;
const c = <number | string | null>1;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 10, end: 18 },
text: '<number>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 31, end: 48 },
text: '<number | string>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 61, end: 85 },
text: '<number | string | null>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
}
]);
});
it('call expression', () => {
const analyzer = new TypeAnalyzer(`
b<number>();
new d<number, string>();
f<number, string, null>();
new Set<PersistListener<S>>()
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 2, end: 10 },
text: '<number>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 19, end: 35 },
text: '<number, string>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 40, end: 62 },
text: '<number, string, null>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { end: 93, pos: 73 },
text: '<PersistListener<S>>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
}
]);
});
describe('class', () => {
it('property type definition', () => {
const analyzer = new TypeAnalyzer(`
class A {
a: number;
public b: string;
protected c: {
e: 1
}
private d: () => void = () => {}
e!: boolean;
g?: string;
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 22 },
text: ': number',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 34, end: 42 },
text: ': string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 57, end: 73 },
text: ': {\n e: 1\n }',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 85, end: 97 },
text: ': () => void',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 112, end: 122 },
text: '!: boolean',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { end: 136, pos: 127 },
text: '?: string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
}
]);
});
it('method declaration', () => {
const analyzer = new TypeAnalyzer(`
class A {
public a(p: 1): boolean;
public a(p: 2): number;
public a(p: 1 | 2): boolean | number {
return '' as any;
}
public b(a: number): string;
protected c(b: number | 1): {
e: 1
}
protected get compileUtils(): any | 'compileUtils' {
const abc = {
getConfig: (): ReadonlyDeep<InnerCompilerConfig> => {
return getCurrentCompileConfig() as any as unknown;
},
b(): void {}
}
}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 37 },
text: ' public a(p: 1): boolean;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 38, end: 63 },
text: ' public a(p: 2): number;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 76, end: 83 },
text: ': 1 | 2',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 84, end: 102 },
text: ': boolean | number',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 118, end: 125 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 131, end: 161 },
text: ' public b(a: number): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 162, end: 206 },
text: ' protected c(b: number | 1): {\n e: 1\n }',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 237, end: 259 },
text: ": any | 'compileUtils'",
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 299, end: 334 },
text: ': ReadonlyDeep<InnerCompilerConfig>',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 380, end: 387 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 387, end: 398 },
text: ' as unknown',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 418, end: 424 },
text: ': void',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('constructor', () => {
const analyzer = new TypeAnalyzer(`
class A {
constructor(a: number) {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 26, end: 34 },
text: ': number',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
});
describe('tsx', () => {
it('generic arguments', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number> />
const b = <A<number, string> />
const c = <A<number, string, null> />
const d = <A
<number, string, null, 1, 2 | 3, [22]>
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 49, end: 65 },
text: '<number, string>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 83, end: 105 },
text: '<number, string, null>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 128, end: 166 },
text: '<number, string, null, 1, 2 | 3, [22]>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
}
]);
});
it('integration', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number>
name
test={111 as any}
t2={\`...\${11 as string}\`}
{...test as object}
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 58, end: 65 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 85, end: 95 },
text: ' as string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 113, end: 123 },
text: ' as object',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
});
| src/core/helpers/type-analyzer/index.test.ts | xlboy-ts-type-hidden-a749a29 | [
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " return this.pushAnalyzedType(TYPE_KIND.FUNCTION_CALL_GENERIC, [\n prevNode.end - 1,\n nextNode.pos + 1\n ]);\n }\n }\n // context: `<number>a`, get `<number>`\n function handleParentTypeAssertionExpr(\n this: TypeAnalyzer,\n parent: ts.TypeAssertion,",
"score": 0.7832475900650024
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ]);\n }\n }\n }\n private pushAnalyzedType(\n kind: AnalyzedType['kind'],\n range: [pos: number, end: number]\n ) {\n const [pos, end] = range;\n const text = this.sourceFile.text.slice(pos, end);",
"score": 0.7772439122200012
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " const prevNode = children[startIndex - 1];\n // >\n const nextNode = children[endIndex + 1];\n return this.pushAnalyzedType(TYPE_KIND.TSX_COMPONENT_GENERIC, [\n prevNode.end - 1,\n nextNode.pos\n ]);\n }\n }\n // [class] context: `class A { a?: number }`, get `?: number`",
"score": 0.7742205262184143
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " return this.pushAnalyzedType(TYPE_KIND.ANGLE_BRACKETS_ASSERTION, [\n prevNode.end - 1,\n nextNode.pos + 1\n ]);\n }\n // context = `a as number` | `a satisfies number`, curChild = `number`\n function handleParentAsOrSatisfiesExpr(\n this: TypeAnalyzer,\n parent: ts.AsExpression | ts.SatisfiesExpression,\n curChild: ts.Node",
"score": 0.7692707777023315
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " }\n return this.pushAnalyzedType(TYPE_KIND.FUNCTION_OVERLOAD, [\n startPos,\n parent.end\n ]);\n } else {\n return this.pushAnalyzedType(TYPE_KIND.FUNCTION_OVERLOAD, [\n parent.pos,\n parent.end\n ]);",
"score": 0.76702481508255
}
] | typescript | TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{ |
import { describe, expect, it } from 'vitest';
import { TypeAnalyzer } from '.';
import { TYPE_KIND } from './constants';
describe('function', () => {
it('overloading', () => {
const analyzer = new TypeAnalyzer(`
const t = 1
function a<B extends 222>(): void;
function b<A>(o: A): string;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 48 },
text: 'function a<B extends 222>(): void;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 49, end: 77 },
text: 'function b<A>(o: A): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
}
]);
});
it('function-generic-definition - a`<B extends ...>`()', () => {
const analyzer = new TypeAnalyzer(
`
function a<B extends 111, C extends 111>() {}
const b = <B extends 222, C extends 222>() => {};
const c = function<B extends 333, C extends 333>() {}
const d = {
a<B extends 444, C extends 444>() {}
}
`
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 41 },
text: '<B extends 111, C extends 111>',
| kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{ |
range: { pos: 57, end: 87 },
text: '<B extends 222, C extends 222>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 115, end: 145 },
text: '<B extends 333, C extends 333>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 166, end: 196 },
text: '<B extends 444, C extends 444>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
}
]);
});
it('function-parameter - (`a: number, b: string, ...`)', () => {
const analyzer = new TypeAnalyzer(`
function a(a1: A111, a2?: A222) {}
const b = (b1: B111, b2?: B222) => {};
const c = function(c1: C111, c2?: C222) {}
const d = {
e(d1: E111, d2?: E222) {}
f: (f1: F111, f2?: F222) => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 24, end: 31 },
text: '?: A222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 49, end: 55 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 59, end: 66 },
text: '?: B222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 96, end: 102 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 106, end: 113 },
text: '?: C222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 136, end: 142 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 146, end: 153 },
text: '?: E222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 166, end: 172 },
text: ': F111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 176, end: 183 },
text: '?: F222',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
it('function-return - ()`: number`', () => {
const analyzer = new TypeAnalyzer(`n
function a(): A111 {}
const b = (): B111 => {};
const c = function(): C111 {}
const d = {
d(): D111 {}
e: (): E111 => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 36, end: 42 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 70, end: 76 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 97, end: 103 },
text: ': D111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 114, end: 120 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('function-type-predicate - (a: any)`: asserts a is ...)`', () => {
const analyzer = new TypeAnalyzer(`
function a(value): asserts a is aaa {}
const b = (value): asserts b is bbb => {};
const c = function (value): asserts d is ddd {};
const d = {
e(value): asserts e is eee {},
f: (value): asserts f is fff => {}
};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 18, end: 36 },
text: ': asserts a is aaa',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 58, end: 76 },
text: ': asserts b is bbb',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 111, end: 129 },
text: ': asserts d is ddd',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 157, end: 175 },
text: ': asserts e is eee',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 192, end: 210 },
text: ': asserts f is fff',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
}
]);
});
});
it('interface', () => {
const analyzer = new TypeAnalyzer(`
interface t {};
interface A111 {
a: number;
b: string;
c: {
e: 1
}
}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 15 },
text: 'interface t {}',
kind: TYPE_KIND.INTERFACE
},
{
range: { pos: 17, end: 81 },
text: 'interface A111 {\n a: number;\n b: string;\n c: {\n e: 1\n }\n}',
kind: TYPE_KIND.INTERFACE
}
]);
});
it('type alias', () => {
const analyzer = new TypeAnalyzer(`
type t = number;
type A111 = {
a: number;
} | 123 & {}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 17 },
text: 'type t = number;',
kind: TYPE_KIND.TYPE_ALIAS
},
{
range: { pos: 18, end: 58 },
text: 'type A111 = {\n a: number;\n} | 123 & {}',
kind: TYPE_KIND.TYPE_ALIAS
}
]);
});
it('variable type definition', () => {
const analyzer = new TypeAnalyzer(`
const a = 1;
declare const b: number, c: string;
const d: number, e: string;
const eee: null | string = ''
let fff!: string = ''
using ggg: usingAny = fn();
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 49 },
text: 'declare const b: number, c: string;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 57, end: 65 },
text: ': number',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 68, end: 76 },
text: ': string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 87, end: 102 },
text: ': null | string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 115, end: 124 },
text: '!: string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 139, end: 149 },
text: ': usingAny',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
}
]);
});
it('declare statement', () => {
const analyzer = new TypeAnalyzer(`
declare const a: number;
declare function b(): number;
declare class c {}
declare module d {}
declare namespace e {}
declare enum f {}
declare global {}
declare module 'g' {}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 25 },
text: 'declare const a: number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 26, end: 55 },
text: 'declare function b(): number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 56, end: 74 },
text: 'declare class c {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 75, end: 94 },
text: 'declare module d {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 95, end: 117 },
text: 'declare namespace e {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 118, end: 135 },
text: 'declare enum f {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 136, end: 153 },
text: 'declare global {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 154, end: 175 },
text: "declare module 'g' {}",
kind: TYPE_KIND.DECLARE_STATEMENT
}
]);
});
it('as expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 as number;
const b = 1 as number | string;
const c = 1 as number | string | null as 111 as 3;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 22 },
text: ' as number',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 35, end: 54 },
text: ' as number | string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 67, end: 93 },
text: ' as number | string | null',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 93, end: 100 },
text: ' as 111',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 100, end: 105 },
text: ' as 3',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
it('satisfies expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 satisfies number;
const b = 1 satisfies number | string;
const c = 1 satisfies number | string | null;
const d = () => {
return 333 satisfies any
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 29 },
text: ' satisfies number',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 42, end: 68 },
text: ' satisfies number | string',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 81, end: 114 },
text: ' satisfies number | string | null',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 147, end: 161 },
text: ' satisfies any',
kind: TYPE_KIND.SATISFIES_OPERATOR
}
]);
});
it('satisfies & as', () => {
const analyzer = new TypeAnalyzer(`
const a = {} satisfies {} as const;
const b = {} as const satisfies {};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 13, end: 26 },
text: ' satisfies {}'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 26, end: 35 },
text: ' as const'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 49, end: 58 },
text: ' as const'
},
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 58, end: 71 },
text: ' satisfies {}'
}
]);
});
it('type assertion', () => {
const analyzer = new TypeAnalyzer(`
const a =<number>1;
const b = <number | string>1;
const c = <number | string | null>1;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 10, end: 18 },
text: '<number>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 31, end: 48 },
text: '<number | string>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 61, end: 85 },
text: '<number | string | null>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
}
]);
});
it('call expression', () => {
const analyzer = new TypeAnalyzer(`
b<number>();
new d<number, string>();
f<number, string, null>();
new Set<PersistListener<S>>()
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 2, end: 10 },
text: '<number>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 19, end: 35 },
text: '<number, string>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 40, end: 62 },
text: '<number, string, null>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { end: 93, pos: 73 },
text: '<PersistListener<S>>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
}
]);
});
describe('class', () => {
it('property type definition', () => {
const analyzer = new TypeAnalyzer(`
class A {
a: number;
public b: string;
protected c: {
e: 1
}
private d: () => void = () => {}
e!: boolean;
g?: string;
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 22 },
text: ': number',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 34, end: 42 },
text: ': string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 57, end: 73 },
text: ': {\n e: 1\n }',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 85, end: 97 },
text: ': () => void',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 112, end: 122 },
text: '!: boolean',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { end: 136, pos: 127 },
text: '?: string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
}
]);
});
it('method declaration', () => {
const analyzer = new TypeAnalyzer(`
class A {
public a(p: 1): boolean;
public a(p: 2): number;
public a(p: 1 | 2): boolean | number {
return '' as any;
}
public b(a: number): string;
protected c(b: number | 1): {
e: 1
}
protected get compileUtils(): any | 'compileUtils' {
const abc = {
getConfig: (): ReadonlyDeep<InnerCompilerConfig> => {
return getCurrentCompileConfig() as any as unknown;
},
b(): void {}
}
}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 37 },
text: ' public a(p: 1): boolean;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 38, end: 63 },
text: ' public a(p: 2): number;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 76, end: 83 },
text: ': 1 | 2',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 84, end: 102 },
text: ': boolean | number',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 118, end: 125 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 131, end: 161 },
text: ' public b(a: number): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 162, end: 206 },
text: ' protected c(b: number | 1): {\n e: 1\n }',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 237, end: 259 },
text: ": any | 'compileUtils'",
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 299, end: 334 },
text: ': ReadonlyDeep<InnerCompilerConfig>',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 380, end: 387 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 387, end: 398 },
text: ' as unknown',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 418, end: 424 },
text: ': void',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('constructor', () => {
const analyzer = new TypeAnalyzer(`
class A {
constructor(a: number) {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 26, end: 34 },
text: ': number',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
});
describe('tsx', () => {
it('generic arguments', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number> />
const b = <A<number, string> />
const c = <A<number, string, null> />
const d = <A
<number, string, null, 1, 2 | 3, [22]>
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 49, end: 65 },
text: '<number, string>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 83, end: 105 },
text: '<number, string, null>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 128, end: 166 },
text: '<number, string, null, 1, 2 | 3, [22]>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
}
]);
});
it('integration', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number>
name
test={111 as any}
t2={\`...\${11 as string}\`}
{...test as object}
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 58, end: 65 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 85, end: 95 },
text: ' as string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 113, end: 123 },
text: ' as object',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
});
| src/core/helpers/type-analyzer/index.test.ts | xlboy-ts-type-hidden-a749a29 | [
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " const prevNode = children[startIndex - 1];\n // >\n const nextNode = children[endIndex + 1];\n return this.pushAnalyzedType(TYPE_KIND.TSX_COMPONENT_GENERIC, [\n prevNode.end - 1,\n nextNode.pos\n ]);\n }\n }\n // [class] context: `class A { a?: number }`, get `?: number`",
"score": 0.7934115529060364
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " return this.pushAnalyzedType(TYPE_KIND.FUNCTION_CALL_GENERIC, [\n prevNode.end - 1,\n nextNode.pos + 1\n ]);\n }\n }\n // context: `<number>a`, get `<number>`\n function handleParentTypeAssertionExpr(\n this: TypeAnalyzer,\n parent: ts.TypeAssertion,",
"score": 0.7861771583557129
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " }\n return this.pushAnalyzedType(TYPE_KIND.FUNCTION_OVERLOAD, [\n startPos,\n parent.end\n ]);\n } else {\n return this.pushAnalyzedType(TYPE_KIND.FUNCTION_OVERLOAD, [\n parent.pos,\n parent.end\n ]);",
"score": 0.7817461490631104
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ]);\n }\n }\n }\n private pushAnalyzedType(\n kind: AnalyzedType['kind'],\n range: [pos: number, end: number]\n ) {\n const [pos, end] = range;\n const text = this.sourceFile.text.slice(pos, end);",
"score": 0.7739237546920776
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " return this.pushAnalyzedType(TYPE_KIND.ANGLE_BRACKETS_ASSERTION, [\n prevNode.end - 1,\n nextNode.pos + 1\n ]);\n }\n // context = `a as number` | `a satisfies number`, curChild = `number`\n function handleParentAsOrSatisfiesExpr(\n this: TypeAnalyzer,\n parent: ts.AsExpression | ts.SatisfiesExpression,\n curChild: ts.Node",
"score": 0.7604323029518127
}
] | typescript | kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{ |
import { describe, expect, it } from 'vitest';
import { TypeAnalyzer } from '.';
import { TYPE_KIND } from './constants';
describe('function', () => {
it('overloading', () => {
const analyzer = new TypeAnalyzer(`
const t = 1
function a<B extends 222>(): void;
function b<A>(o: A): string;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 48 },
text: 'function a<B extends 222>(): void;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 49, end: 77 },
text: 'function b<A>(o: A): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
}
]);
});
it('function-generic-definition - a`<B extends ...>`()', () => {
const analyzer = new TypeAnalyzer(
`
function a<B extends 111, C extends 111>() {}
const b = <B extends 222, C extends 222>() => {};
const c = function<B extends 333, C extends 333>() {}
const d = {
a<B extends 444, C extends 444>() {}
}
`
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 41 },
text: '<B extends 111, C extends 111>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 57, end: 87 },
text: '<B extends 222, C extends 222>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 115, end: 145 },
text: '<B extends 333, C extends 333>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 166, end: 196 },
text: '<B extends 444, C extends 444>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
}
]);
});
it('function-parameter - (`a: number, b: string, ...`)', () => {
const analyzer = new TypeAnalyzer(`
function a(a1: A111, a2?: A222) {}
const b = (b1: B111, b2?: B222) => {};
const c = function(c1: C111, c2?: C222) {}
const d = {
e(d1: E111, d2?: E222) {}
f: (f1: F111, f2?: F222) => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 24, end: 31 },
text: '?: A222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 49, end: 55 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 59, end: 66 },
text: '?: B222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 96, end: 102 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 106, end: 113 },
text: '?: C222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 136, end: 142 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 146, end: 153 },
text: '?: E222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 166, end: 172 },
text: ': F111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 176, end: 183 },
text: '?: F222',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
it('function-return - ()`: number`', () => {
const analyzer = new TypeAnalyzer(`n
function a(): A111 {}
const b = (): B111 => {};
const c = function(): C111 {}
const d = {
d(): D111 {}
e: (): E111 => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 36, end: 42 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 70, end: 76 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 97, end: 103 },
text: ': D111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 114, end: 120 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('function-type-predicate - (a: any)`: asserts a is ...)`', () => {
const analyzer = new TypeAnalyzer(`
function a(value): asserts a is aaa {}
const b = (value): asserts b is bbb => {};
const c = function (value): asserts d is ddd {};
const d = {
e(value): asserts e is eee {},
f: (value): asserts f is fff => {}
};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 18, end: 36 },
text: ': asserts a is aaa',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 58, end: 76 },
text: ': asserts b is bbb',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 111, end: 129 },
text: ': asserts d is ddd',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 157, end: 175 },
text: ': asserts e is eee',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 192, end: 210 },
text: ': asserts f is fff',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
}
]);
});
});
it('interface', () => {
const analyzer = new TypeAnalyzer(`
interface t {};
interface A111 {
a: number;
b: string;
c: {
e: 1
}
}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 15 },
text: 'interface t {}',
| kind: TYPE_KIND.INTERFACE
},
{ |
range: { pos: 17, end: 81 },
text: 'interface A111 {\n a: number;\n b: string;\n c: {\n e: 1\n }\n}',
kind: TYPE_KIND.INTERFACE
}
]);
});
it('type alias', () => {
const analyzer = new TypeAnalyzer(`
type t = number;
type A111 = {
a: number;
} | 123 & {}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 17 },
text: 'type t = number;',
kind: TYPE_KIND.TYPE_ALIAS
},
{
range: { pos: 18, end: 58 },
text: 'type A111 = {\n a: number;\n} | 123 & {}',
kind: TYPE_KIND.TYPE_ALIAS
}
]);
});
it('variable type definition', () => {
const analyzer = new TypeAnalyzer(`
const a = 1;
declare const b: number, c: string;
const d: number, e: string;
const eee: null | string = ''
let fff!: string = ''
using ggg: usingAny = fn();
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 49 },
text: 'declare const b: number, c: string;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 57, end: 65 },
text: ': number',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 68, end: 76 },
text: ': string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 87, end: 102 },
text: ': null | string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 115, end: 124 },
text: '!: string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 139, end: 149 },
text: ': usingAny',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
}
]);
});
it('declare statement', () => {
const analyzer = new TypeAnalyzer(`
declare const a: number;
declare function b(): number;
declare class c {}
declare module d {}
declare namespace e {}
declare enum f {}
declare global {}
declare module 'g' {}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 25 },
text: 'declare const a: number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 26, end: 55 },
text: 'declare function b(): number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 56, end: 74 },
text: 'declare class c {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 75, end: 94 },
text: 'declare module d {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 95, end: 117 },
text: 'declare namespace e {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 118, end: 135 },
text: 'declare enum f {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 136, end: 153 },
text: 'declare global {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 154, end: 175 },
text: "declare module 'g' {}",
kind: TYPE_KIND.DECLARE_STATEMENT
}
]);
});
it('as expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 as number;
const b = 1 as number | string;
const c = 1 as number | string | null as 111 as 3;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 22 },
text: ' as number',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 35, end: 54 },
text: ' as number | string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 67, end: 93 },
text: ' as number | string | null',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 93, end: 100 },
text: ' as 111',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 100, end: 105 },
text: ' as 3',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
it('satisfies expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 satisfies number;
const b = 1 satisfies number | string;
const c = 1 satisfies number | string | null;
const d = () => {
return 333 satisfies any
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 29 },
text: ' satisfies number',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 42, end: 68 },
text: ' satisfies number | string',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 81, end: 114 },
text: ' satisfies number | string | null',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 147, end: 161 },
text: ' satisfies any',
kind: TYPE_KIND.SATISFIES_OPERATOR
}
]);
});
it('satisfies & as', () => {
const analyzer = new TypeAnalyzer(`
const a = {} satisfies {} as const;
const b = {} as const satisfies {};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 13, end: 26 },
text: ' satisfies {}'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 26, end: 35 },
text: ' as const'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 49, end: 58 },
text: ' as const'
},
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 58, end: 71 },
text: ' satisfies {}'
}
]);
});
it('type assertion', () => {
const analyzer = new TypeAnalyzer(`
const a =<number>1;
const b = <number | string>1;
const c = <number | string | null>1;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 10, end: 18 },
text: '<number>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 31, end: 48 },
text: '<number | string>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 61, end: 85 },
text: '<number | string | null>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
}
]);
});
it('call expression', () => {
const analyzer = new TypeAnalyzer(`
b<number>();
new d<number, string>();
f<number, string, null>();
new Set<PersistListener<S>>()
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 2, end: 10 },
text: '<number>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 19, end: 35 },
text: '<number, string>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 40, end: 62 },
text: '<number, string, null>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { end: 93, pos: 73 },
text: '<PersistListener<S>>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
}
]);
});
describe('class', () => {
it('property type definition', () => {
const analyzer = new TypeAnalyzer(`
class A {
a: number;
public b: string;
protected c: {
e: 1
}
private d: () => void = () => {}
e!: boolean;
g?: string;
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 22 },
text: ': number',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 34, end: 42 },
text: ': string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 57, end: 73 },
text: ': {\n e: 1\n }',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 85, end: 97 },
text: ': () => void',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 112, end: 122 },
text: '!: boolean',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { end: 136, pos: 127 },
text: '?: string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
}
]);
});
it('method declaration', () => {
const analyzer = new TypeAnalyzer(`
class A {
public a(p: 1): boolean;
public a(p: 2): number;
public a(p: 1 | 2): boolean | number {
return '' as any;
}
public b(a: number): string;
protected c(b: number | 1): {
e: 1
}
protected get compileUtils(): any | 'compileUtils' {
const abc = {
getConfig: (): ReadonlyDeep<InnerCompilerConfig> => {
return getCurrentCompileConfig() as any as unknown;
},
b(): void {}
}
}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 37 },
text: ' public a(p: 1): boolean;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 38, end: 63 },
text: ' public a(p: 2): number;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 76, end: 83 },
text: ': 1 | 2',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 84, end: 102 },
text: ': boolean | number',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 118, end: 125 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 131, end: 161 },
text: ' public b(a: number): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 162, end: 206 },
text: ' protected c(b: number | 1): {\n e: 1\n }',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 237, end: 259 },
text: ": any | 'compileUtils'",
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 299, end: 334 },
text: ': ReadonlyDeep<InnerCompilerConfig>',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 380, end: 387 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 387, end: 398 },
text: ' as unknown',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 418, end: 424 },
text: ': void',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('constructor', () => {
const analyzer = new TypeAnalyzer(`
class A {
constructor(a: number) {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 26, end: 34 },
text: ': number',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
});
describe('tsx', () => {
it('generic arguments', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number> />
const b = <A<number, string> />
const c = <A<number, string, null> />
const d = <A
<number, string, null, 1, 2 | 3, [22]>
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 49, end: 65 },
text: '<number, string>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 83, end: 105 },
text: '<number, string, null>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 128, end: 166 },
text: '<number, string, null, 1, 2 | 3, [22]>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
}
]);
});
it('integration', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number>
name
test={111 as any}
t2={\`...\${11 as string}\`}
{...test as object}
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 58, end: 65 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 85, end: 95 },
text: ' as string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 113, end: 123 },
text: ' as object',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
});
| src/core/helpers/type-analyzer/index.test.ts | xlboy-ts-type-hidden-a749a29 | [
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ]);\n }\n }\n }\n private pushAnalyzedType(\n kind: AnalyzedType['kind'],\n range: [pos: number, end: number]\n ) {\n const [pos, end] = range;\n const text = this.sourceFile.text.slice(pos, end);",
"score": 0.8087154626846313
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " const prevNode = children[startIndex - 1];\n // >\n const nextNode = children[endIndex + 1];\n return this.pushAnalyzedType(TYPE_KIND.TSX_COMPONENT_GENERIC, [\n prevNode.end - 1,\n nextNode.pos\n ]);\n }\n }\n // [class] context: `class A { a?: number }`, get `?: number`",
"score": 0.7782868146896362
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " return this.pushAnalyzedType(TYPE_KIND.FUNCTION_CALL_GENERIC, [\n prevNode.end - 1,\n nextNode.pos + 1\n ]);\n }\n }\n // context: `<number>a`, get `<number>`\n function handleParentTypeAssertionExpr(\n this: TypeAnalyzer,\n parent: ts.TypeAssertion,",
"score": 0.7765229344367981
},
{
"filename": "src/core/editor-context.ts",
"retrieved_chunk": " // `[[0, 11], [25, 44]]`, get `[11, 25]`\n activeEditorWindow.visibleRanges.forEach((range, index, visibleRanges) => {\n if (visibleRanges.length === 1 || index === visibleRanges.length - 1) return;\n const endLine = range.end.line;\n const nextStartLine = visibleRanges[index + 1]!.start.line;\n foldingRanges.push({ start: endLine, end: nextStartLine });\n });\n return foldingRanges;\n }\n };",
"score": 0.7645809054374695
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " }\n return this.pushAnalyzedType(TYPE_KIND.FUNCTION_OVERLOAD, [\n startPos,\n parent.end\n ]);\n } else {\n return this.pushAnalyzedType(TYPE_KIND.FUNCTION_OVERLOAD, [\n parent.pos,\n parent.end\n ]);",
"score": 0.7631831169128418
}
] | typescript | kind: TYPE_KIND.INTERFACE
},
{ |
import { describe, expect, it } from 'vitest';
import { TypeAnalyzer } from '.';
import { TYPE_KIND } from './constants';
describe('function', () => {
it('overloading', () => {
const analyzer = new TypeAnalyzer(`
const t = 1
function a<B extends 222>(): void;
function b<A>(o: A): string;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 48 },
text: 'function a<B extends 222>(): void;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 49, end: 77 },
text: 'function b<A>(o: A): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
}
]);
});
it('function-generic-definition - a`<B extends ...>`()', () => {
const analyzer = new TypeAnalyzer(
`
function a<B extends 111, C extends 111>() {}
const b = <B extends 222, C extends 222>() => {};
const c = function<B extends 333, C extends 333>() {}
const d = {
a<B extends 444, C extends 444>() {}
}
`
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 41 },
text: '<B extends 111, C extends 111>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 57, end: 87 },
text: '<B extends 222, C extends 222>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 115, end: 145 },
text: '<B extends 333, C extends 333>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 166, end: 196 },
text: '<B extends 444, C extends 444>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
}
]);
});
it('function-parameter - (`a: number, b: string, ...`)', () => {
const analyzer = new TypeAnalyzer(`
function a(a1: A111, a2?: A222) {}
const b = (b1: B111, b2?: B222) => {};
const c = function(c1: C111, c2?: C222) {}
const d = {
e(d1: E111, d2?: E222) {}
f: (f1: F111, f2?: F222) => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 24, end: 31 },
text: '?: A222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 49, end: 55 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 59, end: 66 },
text: '?: B222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 96, end: 102 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 106, end: 113 },
text: '?: C222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 136, end: 142 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 146, end: 153 },
text: '?: E222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 166, end: 172 },
text: ': F111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 176, end: 183 },
text: '?: F222',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
it('function-return - ()`: number`', () => {
const analyzer = new TypeAnalyzer(`n
function a(): A111 {}
const b = (): B111 => {};
const c = function(): C111 {}
const d = {
d(): D111 {}
e: (): E111 => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 36, end: 42 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 70, end: 76 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 97, end: 103 },
text: ': D111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 114, end: 120 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('function-type-predicate - (a: any)`: asserts a is ...)`', () => {
const analyzer = new TypeAnalyzer(`
function a(value): asserts a is aaa {}
const b = (value): asserts b is bbb => {};
const c = function (value): asserts d is ddd {};
const d = {
e(value): asserts e is eee {},
f: (value): asserts f is fff => {}
};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 18, end: 36 },
text: ': asserts a is aaa',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 58, end: 76 },
text: ': asserts b is bbb',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 111, end: 129 },
text: ': asserts d is ddd',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 157, end: 175 },
text: ': asserts e is eee',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 192, end: 210 },
text: ': asserts f is fff',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
}
]);
});
});
it('interface', () => {
const analyzer = new TypeAnalyzer(`
interface t {};
interface A111 {
a: number;
b: string;
c: {
e: 1
}
}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 15 },
text: 'interface t {}',
kind: TYPE_KIND.INTERFACE
},
{
range: { pos: 17, end: 81 },
text: 'interface A111 {\n a: number;\n b: string;\n c: {\n e: 1\n }\n}',
kind: TYPE_KIND.INTERFACE
}
]);
});
it('type alias', () => {
const analyzer = new TypeAnalyzer(`
type t = number;
type A111 = {
a: number;
} | 123 & {}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 17 },
text: 'type t = number;',
kind: TYPE_KIND.TYPE_ALIAS
},
{
range: { pos: 18, end: 58 },
text: 'type A111 = {\n a: number;\n} | 123 & {}',
kind: TYPE_KIND.TYPE_ALIAS
}
]);
});
it('variable type definition', () => {
const analyzer = new TypeAnalyzer(`
const a = 1;
declare const b: number, c: string;
const d: number, e: string;
const eee: null | string = ''
let fff!: string = ''
using ggg: usingAny = fn();
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 49 },
text: 'declare const b: number, c: string;',
kind: | TYPE_KIND.DECLARE_STATEMENT
},
{ |
range: { pos: 57, end: 65 },
text: ': number',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 68, end: 76 },
text: ': string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 87, end: 102 },
text: ': null | string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 115, end: 124 },
text: '!: string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 139, end: 149 },
text: ': usingAny',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
}
]);
});
it('declare statement', () => {
const analyzer = new TypeAnalyzer(`
declare const a: number;
declare function b(): number;
declare class c {}
declare module d {}
declare namespace e {}
declare enum f {}
declare global {}
declare module 'g' {}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 25 },
text: 'declare const a: number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 26, end: 55 },
text: 'declare function b(): number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 56, end: 74 },
text: 'declare class c {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 75, end: 94 },
text: 'declare module d {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 95, end: 117 },
text: 'declare namespace e {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 118, end: 135 },
text: 'declare enum f {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 136, end: 153 },
text: 'declare global {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 154, end: 175 },
text: "declare module 'g' {}",
kind: TYPE_KIND.DECLARE_STATEMENT
}
]);
});
it('as expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 as number;
const b = 1 as number | string;
const c = 1 as number | string | null as 111 as 3;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 22 },
text: ' as number',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 35, end: 54 },
text: ' as number | string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 67, end: 93 },
text: ' as number | string | null',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 93, end: 100 },
text: ' as 111',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 100, end: 105 },
text: ' as 3',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
it('satisfies expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 satisfies number;
const b = 1 satisfies number | string;
const c = 1 satisfies number | string | null;
const d = () => {
return 333 satisfies any
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 29 },
text: ' satisfies number',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 42, end: 68 },
text: ' satisfies number | string',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 81, end: 114 },
text: ' satisfies number | string | null',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 147, end: 161 },
text: ' satisfies any',
kind: TYPE_KIND.SATISFIES_OPERATOR
}
]);
});
it('satisfies & as', () => {
const analyzer = new TypeAnalyzer(`
const a = {} satisfies {} as const;
const b = {} as const satisfies {};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 13, end: 26 },
text: ' satisfies {}'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 26, end: 35 },
text: ' as const'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 49, end: 58 },
text: ' as const'
},
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 58, end: 71 },
text: ' satisfies {}'
}
]);
});
it('type assertion', () => {
const analyzer = new TypeAnalyzer(`
const a =<number>1;
const b = <number | string>1;
const c = <number | string | null>1;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 10, end: 18 },
text: '<number>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 31, end: 48 },
text: '<number | string>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 61, end: 85 },
text: '<number | string | null>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
}
]);
});
it('call expression', () => {
const analyzer = new TypeAnalyzer(`
b<number>();
new d<number, string>();
f<number, string, null>();
new Set<PersistListener<S>>()
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 2, end: 10 },
text: '<number>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 19, end: 35 },
text: '<number, string>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 40, end: 62 },
text: '<number, string, null>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { end: 93, pos: 73 },
text: '<PersistListener<S>>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
}
]);
});
describe('class', () => {
it('property type definition', () => {
const analyzer = new TypeAnalyzer(`
class A {
a: number;
public b: string;
protected c: {
e: 1
}
private d: () => void = () => {}
e!: boolean;
g?: string;
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 22 },
text: ': number',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 34, end: 42 },
text: ': string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 57, end: 73 },
text: ': {\n e: 1\n }',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 85, end: 97 },
text: ': () => void',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 112, end: 122 },
text: '!: boolean',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { end: 136, pos: 127 },
text: '?: string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
}
]);
});
it('method declaration', () => {
const analyzer = new TypeAnalyzer(`
class A {
public a(p: 1): boolean;
public a(p: 2): number;
public a(p: 1 | 2): boolean | number {
return '' as any;
}
public b(a: number): string;
protected c(b: number | 1): {
e: 1
}
protected get compileUtils(): any | 'compileUtils' {
const abc = {
getConfig: (): ReadonlyDeep<InnerCompilerConfig> => {
return getCurrentCompileConfig() as any as unknown;
},
b(): void {}
}
}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 37 },
text: ' public a(p: 1): boolean;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 38, end: 63 },
text: ' public a(p: 2): number;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 76, end: 83 },
text: ': 1 | 2',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 84, end: 102 },
text: ': boolean | number',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 118, end: 125 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 131, end: 161 },
text: ' public b(a: number): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 162, end: 206 },
text: ' protected c(b: number | 1): {\n e: 1\n }',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 237, end: 259 },
text: ": any | 'compileUtils'",
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 299, end: 334 },
text: ': ReadonlyDeep<InnerCompilerConfig>',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 380, end: 387 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 387, end: 398 },
text: ' as unknown',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 418, end: 424 },
text: ': void',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('constructor', () => {
const analyzer = new TypeAnalyzer(`
class A {
constructor(a: number) {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 26, end: 34 },
text: ': number',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
});
describe('tsx', () => {
it('generic arguments', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number> />
const b = <A<number, string> />
const c = <A<number, string, null> />
const d = <A
<number, string, null, 1, 2 | 3, [22]>
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 49, end: 65 },
text: '<number, string>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 83, end: 105 },
text: '<number, string, null>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 128, end: 166 },
text: '<number, string, null, 1, 2 | 3, [22]>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
}
]);
});
it('integration', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number>
name
test={111 as any}
t2={\`...\${11 as string}\`}
{...test as object}
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 58, end: 65 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 85, end: 95 },
text: ' as string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 113, end: 123 },
text: ' as object',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
});
| src/core/helpers/type-analyzer/index.test.ts | xlboy-ts-type-hidden-a749a29 | [
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ]);\n }\n }\n }\n private pushAnalyzedType(\n kind: AnalyzedType['kind'],\n range: [pos: number, end: number]\n ) {\n const [pos, end] = range;\n const text = this.sourceFile.text.slice(pos, end);",
"score": 0.786308765411377
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " const prevNode = children[startIndex - 1];\n // >\n const nextNode = children[endIndex + 1];\n return this.pushAnalyzedType(TYPE_KIND.TSX_COMPONENT_GENERIC, [\n prevNode.end - 1,\n nextNode.pos\n ]);\n }\n }\n // [class] context: `class A { a?: number }`, get `?: number`",
"score": 0.7735636830329895
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " return this.pushAnalyzedType(TYPE_KIND.FUNCTION_CALL_GENERIC, [\n prevNode.end - 1,\n nextNode.pos + 1\n ]);\n }\n }\n // context: `<number>a`, get `<number>`\n function handleParentTypeAssertionExpr(\n this: TypeAnalyzer,\n parent: ts.TypeAssertion,",
"score": 0.7685744762420654
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " return this.pushAnalyzedType(TYPE_KIND.ANGLE_BRACKETS_ASSERTION, [\n prevNode.end - 1,\n nextNode.pos + 1\n ]);\n }\n // context = `a as number` | `a satisfies number`, curChild = `number`\n function handleParentAsOrSatisfiesExpr(\n this: TypeAnalyzer,\n parent: ts.AsExpression | ts.SatisfiesExpression,\n curChild: ts.Node",
"score": 0.755550742149353
},
{
"filename": "src/core/helpers/type-analyzer/constants.ts",
"retrieved_chunk": "export enum TYPE_KIND {\n /**\n * ```ts\n * type A = ({ ... } & { ... }) | string[]\n * ```\n * ⏭️ `type A = ({ ... } & { ... }) | string[]`\n */\n TYPE_ALIAS = 'type-alias',\n /**\n * ```ts",
"score": 0.7524573802947998
}
] | typescript | TYPE_KIND.DECLARE_STATEMENT
},
{ |
import { describe, expect, it } from 'vitest';
import { TypeAnalyzer } from '.';
import { TYPE_KIND } from './constants';
describe('function', () => {
it('overloading', () => {
const analyzer = new TypeAnalyzer(`
const t = 1
function a<B extends 222>(): void;
function b<A>(o: A): string;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 48 },
text: 'function a<B extends 222>(): void;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 49, end: 77 },
text: 'function b<A>(o: A): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
}
]);
});
it('function-generic-definition - a`<B extends ...>`()', () => {
const analyzer = new TypeAnalyzer(
`
function a<B extends 111, C extends 111>() {}
const b = <B extends 222, C extends 222>() => {};
const c = function<B extends 333, C extends 333>() {}
const d = {
a<B extends 444, C extends 444>() {}
}
`
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 41 },
text: '<B extends 111, C extends 111>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 57, end: 87 },
text: '<B extends 222, C extends 222>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 115, end: 145 },
text: '<B extends 333, C extends 333>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 166, end: 196 },
text: '<B extends 444, C extends 444>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
}
]);
});
it('function-parameter - (`a: number, b: string, ...`)', () => {
const analyzer = new TypeAnalyzer(`
function a(a1: A111, a2?: A222) {}
const b = (b1: B111, b2?: B222) => {};
const c = function(c1: C111, c2?: C222) {}
const d = {
e(d1: E111, d2?: E222) {}
f: (f1: F111, f2?: F222) => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 24, end: 31 },
text: '?: A222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 49, end: 55 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 59, end: 66 },
text: '?: B222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 96, end: 102 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 106, end: 113 },
text: '?: C222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 136, end: 142 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 146, end: 153 },
text: '?: E222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 166, end: 172 },
text: ': F111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 176, end: 183 },
text: '?: F222',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
it('function-return - ()`: number`', () => {
const analyzer = new TypeAnalyzer(`n
function a(): A111 {}
const b = (): B111 => {};
const c = function(): C111 {}
const d = {
d(): D111 {}
e: (): E111 => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 36, end: 42 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 70, end: 76 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 97, end: 103 },
text: ': D111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 114, end: 120 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('function-type-predicate - (a: any)`: asserts a is ...)`', () => {
const analyzer = new TypeAnalyzer(`
function a(value): asserts a is aaa {}
const b = (value): asserts b is bbb => {};
const c = function (value): asserts d is ddd {};
const d = {
e(value): asserts e is eee {},
f: (value): asserts f is fff => {}
};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 18, end: 36 },
text: ': asserts a is aaa',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 58, end: 76 },
text: ': asserts b is bbb',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 111, end: 129 },
text: ': asserts d is ddd',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 157, end: 175 },
text: ': asserts e is eee',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 192, end: 210 },
text: ': asserts f is fff',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
}
]);
});
});
it('interface', () => {
const analyzer = new TypeAnalyzer(`
interface t {};
interface A111 {
a: number;
b: string;
c: {
e: 1
}
}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 15 },
text: 'interface t {}',
kind: TYPE_KIND.INTERFACE
},
{
range: { pos: 17, end: 81 },
text: 'interface A111 {\n a: number;\n b: string;\n c: {\n e: 1\n }\n}',
kind: TYPE_KIND.INTERFACE
}
]);
});
it('type alias', () => {
const analyzer = new TypeAnalyzer(`
type t = number;
type A111 = {
a: number;
} | 123 & {}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 17 },
text: 'type t = number;',
kind: TYPE_KIND.TYPE_ALIAS
},
{
range: { pos: 18, end: 58 },
text: 'type A111 = {\n a: number;\n} | 123 & {}',
kind: TYPE_KIND.TYPE_ALIAS
}
]);
});
it('variable type definition', () => {
const analyzer = new TypeAnalyzer(`
const a = 1;
declare const b: number, c: string;
const d: number, e: string;
const eee: null | string = ''
let fff!: string = ''
using ggg: usingAny = fn();
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 49 },
text: 'declare const b: number, c: string;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 57, end: 65 },
text: ': number',
kind: | TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{ |
range: { pos: 68, end: 76 },
text: ': string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 87, end: 102 },
text: ': null | string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 115, end: 124 },
text: '!: string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 139, end: 149 },
text: ': usingAny',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
}
]);
});
it('declare statement', () => {
const analyzer = new TypeAnalyzer(`
declare const a: number;
declare function b(): number;
declare class c {}
declare module d {}
declare namespace e {}
declare enum f {}
declare global {}
declare module 'g' {}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 25 },
text: 'declare const a: number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 26, end: 55 },
text: 'declare function b(): number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 56, end: 74 },
text: 'declare class c {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 75, end: 94 },
text: 'declare module d {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 95, end: 117 },
text: 'declare namespace e {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 118, end: 135 },
text: 'declare enum f {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 136, end: 153 },
text: 'declare global {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 154, end: 175 },
text: "declare module 'g' {}",
kind: TYPE_KIND.DECLARE_STATEMENT
}
]);
});
it('as expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 as number;
const b = 1 as number | string;
const c = 1 as number | string | null as 111 as 3;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 22 },
text: ' as number',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 35, end: 54 },
text: ' as number | string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 67, end: 93 },
text: ' as number | string | null',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 93, end: 100 },
text: ' as 111',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 100, end: 105 },
text: ' as 3',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
it('satisfies expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 satisfies number;
const b = 1 satisfies number | string;
const c = 1 satisfies number | string | null;
const d = () => {
return 333 satisfies any
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 29 },
text: ' satisfies number',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 42, end: 68 },
text: ' satisfies number | string',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 81, end: 114 },
text: ' satisfies number | string | null',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 147, end: 161 },
text: ' satisfies any',
kind: TYPE_KIND.SATISFIES_OPERATOR
}
]);
});
it('satisfies & as', () => {
const analyzer = new TypeAnalyzer(`
const a = {} satisfies {} as const;
const b = {} as const satisfies {};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 13, end: 26 },
text: ' satisfies {}'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 26, end: 35 },
text: ' as const'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 49, end: 58 },
text: ' as const'
},
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 58, end: 71 },
text: ' satisfies {}'
}
]);
});
it('type assertion', () => {
const analyzer = new TypeAnalyzer(`
const a =<number>1;
const b = <number | string>1;
const c = <number | string | null>1;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 10, end: 18 },
text: '<number>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 31, end: 48 },
text: '<number | string>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 61, end: 85 },
text: '<number | string | null>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
}
]);
});
it('call expression', () => {
const analyzer = new TypeAnalyzer(`
b<number>();
new d<number, string>();
f<number, string, null>();
new Set<PersistListener<S>>()
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 2, end: 10 },
text: '<number>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 19, end: 35 },
text: '<number, string>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 40, end: 62 },
text: '<number, string, null>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { end: 93, pos: 73 },
text: '<PersistListener<S>>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
}
]);
});
describe('class', () => {
it('property type definition', () => {
const analyzer = new TypeAnalyzer(`
class A {
a: number;
public b: string;
protected c: {
e: 1
}
private d: () => void = () => {}
e!: boolean;
g?: string;
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 22 },
text: ': number',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 34, end: 42 },
text: ': string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 57, end: 73 },
text: ': {\n e: 1\n }',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 85, end: 97 },
text: ': () => void',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 112, end: 122 },
text: '!: boolean',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { end: 136, pos: 127 },
text: '?: string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
}
]);
});
it('method declaration', () => {
const analyzer = new TypeAnalyzer(`
class A {
public a(p: 1): boolean;
public a(p: 2): number;
public a(p: 1 | 2): boolean | number {
return '' as any;
}
public b(a: number): string;
protected c(b: number | 1): {
e: 1
}
protected get compileUtils(): any | 'compileUtils' {
const abc = {
getConfig: (): ReadonlyDeep<InnerCompilerConfig> => {
return getCurrentCompileConfig() as any as unknown;
},
b(): void {}
}
}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 37 },
text: ' public a(p: 1): boolean;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 38, end: 63 },
text: ' public a(p: 2): number;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 76, end: 83 },
text: ': 1 | 2',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 84, end: 102 },
text: ': boolean | number',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 118, end: 125 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 131, end: 161 },
text: ' public b(a: number): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 162, end: 206 },
text: ' protected c(b: number | 1): {\n e: 1\n }',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 237, end: 259 },
text: ": any | 'compileUtils'",
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 299, end: 334 },
text: ': ReadonlyDeep<InnerCompilerConfig>',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 380, end: 387 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 387, end: 398 },
text: ' as unknown',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 418, end: 424 },
text: ': void',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('constructor', () => {
const analyzer = new TypeAnalyzer(`
class A {
constructor(a: number) {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 26, end: 34 },
text: ': number',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
});
describe('tsx', () => {
it('generic arguments', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number> />
const b = <A<number, string> />
const c = <A<number, string, null> />
const d = <A
<number, string, null, 1, 2 | 3, [22]>
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 49, end: 65 },
text: '<number, string>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 83, end: 105 },
text: '<number, string, null>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 128, end: 166 },
text: '<number, string, null, 1, 2 | 3, [22]>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
}
]);
});
it('integration', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number>
name
test={111 as any}
t2={\`...\${11 as string}\`}
{...test as object}
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 58, end: 65 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 85, end: 95 },
text: ' as string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 113, end: 123 },
text: ' as object',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
});
| src/core/helpers/type-analyzer/index.test.ts | xlboy-ts-type-hidden-a749a29 | [
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " const prevNode = children[startIndex - 1];\n // >\n const nextNode = children[endIndex + 1];\n return this.pushAnalyzedType(TYPE_KIND.TSX_COMPONENT_GENERIC, [\n prevNode.end - 1,\n nextNode.pos\n ]);\n }\n }\n // [class] context: `class A { a?: number }`, get `?: number`",
"score": 0.7947178483009338
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " return this.pushAnalyzedType(TYPE_KIND.FUNCTION_CALL_GENERIC, [\n prevNode.end - 1,\n nextNode.pos + 1\n ]);\n }\n }\n // context: `<number>a`, get `<number>`\n function handleParentTypeAssertionExpr(\n this: TypeAnalyzer,\n parent: ts.TypeAssertion,",
"score": 0.7893145084381104
},
{
"filename": "src/core/helpers/type-analyzer/constants.ts",
"retrieved_chunk": "export enum TYPE_KIND {\n /**\n * ```ts\n * type A = ({ ... } & { ... }) | string[]\n * ```\n * ⏭️ `type A = ({ ... } & { ... }) | string[]`\n */\n TYPE_ALIAS = 'type-alias',\n /**\n * ```ts",
"score": 0.7819120287895203
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ]);\n }\n }\n }\n private pushAnalyzedType(\n kind: AnalyzedType['kind'],\n range: [pos: number, end: number]\n ) {\n const [pos, end] = range;\n const text = this.sourceFile.text.slice(pos, end);",
"score": 0.774000883102417
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ) {\n const kind =\n child.kind === ts.SyntaxKind.InterfaceDeclaration\n ? TYPE_KIND.INTERFACE\n : TYPE_KIND.TYPE_ALIAS;\n this.pushAnalyzedType(kind, [child.pos, child.end]);\n }\n // context = `a: number`, curChild = `number`\n function handleParentParameter(\n this: TypeAnalyzer,",
"score": 0.7692480683326721
}
] | typescript | TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{ |
import { describe, expect, it } from 'vitest';
import { TypeAnalyzer } from '.';
import { TYPE_KIND } from './constants';
describe('function', () => {
it('overloading', () => {
const analyzer = new TypeAnalyzer(`
const t = 1
function a<B extends 222>(): void;
function b<A>(o: A): string;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 48 },
text: 'function a<B extends 222>(): void;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 49, end: 77 },
text: 'function b<A>(o: A): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
}
]);
});
it('function-generic-definition - a`<B extends ...>`()', () => {
const analyzer = new TypeAnalyzer(
`
function a<B extends 111, C extends 111>() {}
const b = <B extends 222, C extends 222>() => {};
const c = function<B extends 333, C extends 333>() {}
const d = {
a<B extends 444, C extends 444>() {}
}
`
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 41 },
text: '<B extends 111, C extends 111>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 57, end: 87 },
text: '<B extends 222, C extends 222>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 115, end: 145 },
text: '<B extends 333, C extends 333>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 166, end: 196 },
text: '<B extends 444, C extends 444>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
}
]);
});
it('function-parameter - (`a: number, b: string, ...`)', () => {
const analyzer = new TypeAnalyzer(`
function a(a1: A111, a2?: A222) {}
const b = (b1: B111, b2?: B222) => {};
const c = function(c1: C111, c2?: C222) {}
const d = {
e(d1: E111, d2?: E222) {}
f: (f1: F111, f2?: F222) => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 24, end: 31 },
text: '?: A222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 49, end: 55 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 59, end: 66 },
text: '?: B222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 96, end: 102 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 106, end: 113 },
text: '?: C222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 136, end: 142 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 146, end: 153 },
text: '?: E222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 166, end: 172 },
text: ': F111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 176, end: 183 },
text: '?: F222',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
it('function-return - ()`: number`', () => {
const analyzer = new TypeAnalyzer(`n
function a(): A111 {}
const b = (): B111 => {};
const c = function(): C111 {}
const d = {
d(): D111 {}
e: (): E111 => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 36, end: 42 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 70, end: 76 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 97, end: 103 },
text: ': D111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 114, end: 120 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('function-type-predicate - (a: any)`: asserts a is ...)`', () => {
const analyzer = new TypeAnalyzer(`
function a(value): asserts a is aaa {}
const b = (value): asserts b is bbb => {};
const c = function (value): asserts d is ddd {};
const d = {
e(value): asserts e is eee {},
f: (value): asserts f is fff => {}
};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 18, end: 36 },
text: ': asserts a is aaa',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 58, end: 76 },
text: ': asserts b is bbb',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 111, end: 129 },
text: ': asserts d is ddd',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 157, end: 175 },
text: ': asserts e is eee',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 192, end: 210 },
text: ': asserts f is fff',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
}
]);
});
});
it('interface', () => {
const analyzer = new TypeAnalyzer(`
interface t {};
interface A111 {
a: number;
b: string;
c: {
e: 1
}
}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 15 },
text: 'interface t {}',
kind: TYPE_KIND.INTERFACE
},
{
range: { pos: 17, end: 81 },
text: 'interface A111 {\n a: number;\n b: string;\n c: {\n e: 1\n }\n}',
kind: TYPE_KIND.INTERFACE
}
]);
});
it('type alias', () => {
const analyzer = new TypeAnalyzer(`
type t = number;
type A111 = {
a: number;
} | 123 & {}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 17 },
text: 'type t = number;',
kind: TYPE_KIND.TYPE_ALIAS
},
{
range: { pos: 18, end: 58 },
text: 'type A111 = {\n a: number;\n} | 123 & {}',
kind: TYPE_KIND.TYPE_ALIAS
}
]);
});
  // Variable annotations: untyped `const a = 1;` yields no entry; a whole
  // `declare` statement is one DECLARE_STATEMENT; `!:` (definite assignment)
  // is kept as part of the annotation range.
  it('variable type definition', () => {
    const analyzer = new TypeAnalyzer(`
const a = 1;
declare const b: number, c: string;
const d: number, e: string;
const eee: null | string = ''
let fff!: string = ''
using ggg: usingAny = fn();
    `);
    analyzer.analyze();
    expect(analyzer.analyzedTypes).toMatchObject([
      {
        range: { pos: 14, end: 49 },
        text: 'declare const b: number, c: string;',
        kind: TYPE_KIND.DECLARE_STATEMENT
      },
      {
        range: { pos: 57, end: 65 },
        text: ': number',
        kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
      },
      {
        range: { pos: 68, end: 76 },
        text: ': string',
        kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
      },
      {
        range: { pos: 87, end: 102 },
        text: ': null | string',
        kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
      },
      {
        range: { pos: 115, end: 124 },
        text: '!: string',
        kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
      },
      {
        range: { pos: 139, end: 149 },
        text: ': usingAny',
        kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
      }
    ]);
  });
  // Every ambient form (`declare const/function/class/module/namespace/enum/
  // global` and quoted ambient modules) is captured as one DECLARE_STATEMENT.
  it('declare statement', () => {
    const analyzer = new TypeAnalyzer(`
declare const a: number;
declare function b(): number;
declare class c {}
declare module d {}
declare namespace e {}
declare enum f {}
declare global {}
declare module 'g' {}
    `);
    analyzer.analyze();
    expect(analyzer.analyzedTypes).toMatchObject([
      {
        range: { pos: 1, end: 25 },
        text: 'declare const a: number;',
        kind: TYPE_KIND.DECLARE_STATEMENT
      },
      {
        range: { pos: 26, end: 55 },
        text: 'declare function b(): number;',
        kind: TYPE_KIND.DECLARE_STATEMENT
      },
      {
        range: { pos: 56, end: 74 },
        text: 'declare class c {}',
        kind: TYPE_KIND.DECLARE_STATEMENT
      },
      {
        range: { pos: 75, end: 94 },
        text: 'declare module d {}',
        kind: TYPE_KIND.DECLARE_STATEMENT
      },
      {
        range: { pos: 95, end: 117 },
        text: 'declare namespace e {}',
        kind: TYPE_KIND.DECLARE_STATEMENT
      },
      {
        range: { pos: 118, end: 135 },
        text: 'declare enum f {}',
        kind: TYPE_KIND.DECLARE_STATEMENT
      },
      {
        range: { pos: 136, end: 153 },
        text: 'declare global {}',
        kind: TYPE_KIND.DECLARE_STATEMENT
      },
      {
        range: { pos: 154, end: 175 },
        text: "declare module 'g' {}",
        kind: TYPE_KIND.DECLARE_STATEMENT
      }
    ]);
  });
  // `as` assertions: each range includes the leading space, and a chained
  // expression (`… as null as 111 as 3`) yields one entry per `as`.
  it('as expression', () => {
    const analyzer = new TypeAnalyzer(`
const a = 1 as number;
const b = 1 as number | string;
const c = 1 as number | string | null as 111 as 3;
    `);
    analyzer.analyze();
    expect(analyzer.analyzedTypes).toMatchObject([
      {
        range: { pos: 12, end: 22 },
        text: ' as number',
        kind: TYPE_KIND.AS_ASSERTION
      },
      {
        range: { pos: 35, end: 54 },
        text: ' as number | string',
        kind: TYPE_KIND.AS_ASSERTION
      },
      {
        range: { pos: 67, end: 93 },
        text: ' as number | string | null',
        kind: TYPE_KIND.AS_ASSERTION
      },
      {
        range: { pos: 93, end: 100 },
        text: ' as 111',
        kind: TYPE_KIND.AS_ASSERTION
      },
      {
        range: { pos: 100, end: 105 },
        text: ' as 3',
        kind: TYPE_KIND.AS_ASSERTION
      }
    ]);
  });
  // `satisfies` operator: captured with its leading space, including uses
  // inside a return statement of an arrow-function body.
  it('satisfies expression', () => {
    const analyzer = new TypeAnalyzer(`
const a = 1 satisfies number;
const b = 1 satisfies number | string;
const c = 1 satisfies number | string | null;
const d = () => {
  return 333 satisfies any
}
    `);
    analyzer.analyze();
    expect(analyzer.analyzedTypes).toMatchObject([
      {
        range: { pos: 12, end: 29 },
        text: ' satisfies number',
        kind: TYPE_KIND.SATISFIES_OPERATOR
      },
      {
        range: { pos: 42, end: 68 },
        text: ' satisfies number | string',
        kind: TYPE_KIND.SATISFIES_OPERATOR
      },
      {
        range: { pos: 81, end: 114 },
        text: ' satisfies number | string | null',
        kind: TYPE_KIND.SATISFIES_OPERATOR
      },
      {
        range: { pos: 147, end: 161 },
        text: ' satisfies any',
        kind: TYPE_KIND.SATISFIES_OPERATOR
      }
    ]);
  });
  // Mixed `satisfies`/`as` chains: entries appear in source order, in both
  // `satisfies {} as const` and `as const satisfies {}` arrangements.
  it('satisfies & as', () => {
    const analyzer = new TypeAnalyzer(`
const a = {} satisfies {} as const;
const b = {} as const satisfies {};
    `);
    analyzer.analyze();
    expect(analyzer.analyzedTypes).toMatchObject([
      {
        kind: TYPE_KIND.SATISFIES_OPERATOR,
        range: { pos: 13, end: 26 },
        text: ' satisfies {}'
      },
      {
        kind: TYPE_KIND.AS_ASSERTION,
        range: { pos: 26, end: 35 },
        text: ' as const'
      },
      {
        kind: TYPE_KIND.AS_ASSERTION,
        range: { pos: 49, end: 58 },
        text: ' as const'
      },
      {
        kind: TYPE_KIND.SATISFIES_OPERATOR,
        range: { pos: 58, end: 71 },
        text: ' satisfies {}'
      }
    ]);
  });
  // Angle-bracket assertions (`<T>expr`): the range covers `<T>` only, not the
  // asserted expression; works with or without a space before `<`.
  it('type assertion', () => {
    const analyzer = new TypeAnalyzer(`
const a =<number>1;
const b = <number | string>1;
const c = <number | string | null>1;
    `);
    analyzer.analyze();
    expect(analyzer.analyzedTypes).toMatchObject([
      {
        range: { pos: 10, end: 18 },
        text: '<number>',
        kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
      },
      {
        range: { pos: 31, end: 48 },
        text: '<number | string>',
        kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
      },
      {
        range: { pos: 61, end: 85 },
        text: '<number | string | null>',
        kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
      }
    ]);
  });
  // Explicit generic arguments on call and `new` expressions, including a
  // nested generic (`Set<PersistListener<S>>`).
  it('call expression', () => {
    const analyzer = new TypeAnalyzer(`
b<number>();
new d<number, string>();
f<number, string, null>();
new Set<PersistListener<S>>()
    `);
    analyzer.analyze();
    expect(analyzer.analyzedTypes).toMatchObject([
      {
        range: { pos: 2, end: 10 },
        text: '<number>',
        kind: TYPE_KIND.FUNCTION_CALL_GENERIC
      },
      {
        range: { pos: 19, end: 35 },
        text: '<number, string>',
        kind: TYPE_KIND.FUNCTION_CALL_GENERIC
      },
      {
        range: { pos: 40, end: 62 },
        text: '<number, string, null>',
        kind: TYPE_KIND.FUNCTION_CALL_GENERIC
      },
      {
        range: { end: 93, pos: 73 },
        text: '<PersistListener<S>>',
        kind: TYPE_KIND.FUNCTION_CALL_GENERIC
      }
    ]);
  });
  // Class members: property annotations (incl. `!:` and `?:`), overload
  // signatures (captured whole, with leading indentation), parameter/return
  // annotations on the implementation, and expressions nested in method bodies.
  describe('class', () => {
    it('property type definition', () => {
      const analyzer = new TypeAnalyzer(`
class A {
  a: number;
  public b: string;
  protected c: {
    e: 1
  }
  private d: () => void = () => {}
  e!: boolean;
  g?: string;
}
      `);
      analyzer.analyze();
      expect(analyzer.analyzedTypes).toMatchObject([
        {
          range: { pos: 14, end: 22 },
          text: ': number',
          kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
        },
        {
          range: { pos: 34, end: 42 },
          text: ': string',
          kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
        },
        {
          range: { pos: 57, end: 73 },
          text: ': {\n    e: 1\n  }',
          kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
        },
        {
          range: { pos: 85, end: 97 },
          text: ': () => void',
          kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
        },
        {
          range: { pos: 112, end: 122 },
          text: '!: boolean',
          kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
        },
        {
          range: { end: 136, pos: 127 },
          text: '?: string',
          kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
        }
      ]);
    });
    it('method declaration', () => {
      const analyzer = new TypeAnalyzer(`
class A {
  public a(p: 1): boolean;
  public a(p: 2): number;
  public a(p: 1 | 2): boolean | number {
    return '' as any;
  }
  public b(a: number): string;
  protected c(b: number | 1): {
    e: 1
  }
  protected get compileUtils(): any | 'compileUtils' {
    const abc = {
      getConfig: (): ReadonlyDeep<InnerCompilerConfig> => {
        return getCurrentCompileConfig() as any as unknown;
      },
      b(): void {}
    }
  }
}
      `);
      analyzer.analyze();
      expect(analyzer.analyzedTypes).toMatchObject([
        {
          range: { pos: 11, end: 37 },
          text: '  public a(p: 1): boolean;',
          kind: TYPE_KIND.FUNCTION_OVERLOAD
        },
        {
          range: { pos: 38, end: 63 },
          text: '  public a(p: 2): number;',
          kind: TYPE_KIND.FUNCTION_OVERLOAD
        },
        {
          range: { pos: 76, end: 83 },
          text: ': 1 | 2',
          kind: TYPE_KIND.FUNCTION_PARAMETER
        },
        {
          range: { pos: 84, end: 102 },
          text: ': boolean | number',
          kind: TYPE_KIND.FUNCTION_RETURN
        },
        {
          range: { pos: 118, end: 125 },
          text: ' as any',
          kind: TYPE_KIND.AS_ASSERTION
        },
        {
          range: { pos: 131, end: 161 },
          text: '  public b(a: number): string;',
          kind: TYPE_KIND.FUNCTION_OVERLOAD
        },
        {
          range: { pos: 162, end: 206 },
          text: '  protected c(b: number | 1): {\n    e: 1\n  }',
          kind: TYPE_KIND.FUNCTION_OVERLOAD
        },
        {
          range: { pos: 237, end: 259 },
          text: ": any | 'compileUtils'",
          kind: TYPE_KIND.FUNCTION_RETURN
        },
        {
          range: { pos: 299, end: 334 },
          text: ': ReadonlyDeep<InnerCompilerConfig>',
          kind: TYPE_KIND.FUNCTION_RETURN
        },
        {
          range: { pos: 380, end: 387 },
          text: ' as any',
          kind: TYPE_KIND.AS_ASSERTION
        },
        {
          range: { pos: 387, end: 398 },
          text: ' as unknown',
          kind: TYPE_KIND.AS_ASSERTION
        },
        {
          range: { pos: 418, end: 424 },
          text: ': void',
          kind: TYPE_KIND.FUNCTION_RETURN
        }
      ]);
    });
    it('constructor', () => {
      const analyzer = new TypeAnalyzer(`
class A {
  constructor(a: number) {}
}
      `);
      analyzer.analyze();
      expect(analyzer.analyzedTypes).toMatchObject([
        {
          range: { pos: 26, end: 34 },
          text: ': number',
          kind: TYPE_KIND.FUNCTION_PARAMETER
        }
      ]);
    });
  });
  // TSX mode (second TypeAnalyzer argument = true): generic arguments on JSX
  // component tags, plus `as` assertions inside attributes and spreads.
  describe('tsx', () => {
    it('generic arguments', () => {
      const analyzer = new TypeAnalyzer(
        `
  const a = <Component<number> />
  const b = <A<number, string> />
  const c = <A<number, string, null> />
  const d = <A
    <number, string, null, 1, 2 | 3, [22]>
  />
  `,
        true
      );
      analyzer.analyze();
      expect(analyzer.analyzedTypes).toMatchObject([
        {
          range: { pos: 23, end: 31 },
          text: '<number>',
          kind: TYPE_KIND.TSX_COMPONENT_GENERIC
        },
        {
          range: { pos: 49, end: 65 },
          text: '<number, string>',
          kind: TYPE_KIND.TSX_COMPONENT_GENERIC
        },
        {
          range: { pos: 83, end: 105 },
          text: '<number, string, null>',
          kind: TYPE_KIND.TSX_COMPONENT_GENERIC
        },
        {
          range: { pos: 128, end: 166 },
          text: '<number, string, null, 1, 2 | 3, [22]>',
          kind: TYPE_KIND.TSX_COMPONENT_GENERIC
        }
      ]);
    });
    it('integration', () => {
      const analyzer = new TypeAnalyzer(
        `
  const a = <Component<number>
      name
      test={111 as any}
      t2={\`...\${11 as string}\`}
      {...test as object}
    />
  `,
        true
      );
      analyzer.analyze();
      expect(analyzer.analyzedTypes).toMatchObject([
        {
          range: { pos: 23, end: 31 },
          text: '<number>',
          kind: TYPE_KIND.TSX_COMPONENT_GENERIC
        },
        {
          range: { pos: 58, end: 65 },
          text: ' as any',
          kind: TYPE_KIND.AS_ASSERTION
        },
        {
          range: { pos: 85, end: 95 },
          text: ' as string',
          kind: TYPE_KIND.AS_ASSERTION
        },
        {
          range: { pos: 113, end: 123 },
          text: ' as object',
          kind: TYPE_KIND.AS_ASSERTION
        }
      ]);
    });
  });
| src/core/helpers/type-analyzer/index.test.ts | xlboy-ts-type-hidden-a749a29 | [
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " const prevNode = children[startIndex - 1];\n // >\n const nextNode = children[endIndex + 1];\n return this.pushAnalyzedType(TYPE_KIND.TSX_COMPONENT_GENERIC, [\n prevNode.end - 1,\n nextNode.pos\n ]);\n }\n }\n // [class] context: `class A { a?: number }`, get `?: number`",
"score": 0.7941109538078308
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ]);\n }\n }\n }\n private pushAnalyzedType(\n kind: AnalyzedType['kind'],\n range: [pos: number, end: number]\n ) {\n const [pos, end] = range;\n const text = this.sourceFile.text.slice(pos, end);",
"score": 0.7913469076156616
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " return this.pushAnalyzedType(TYPE_KIND.FUNCTION_CALL_GENERIC, [\n prevNode.end - 1,\n nextNode.pos + 1\n ]);\n }\n }\n // context: `<number>a`, get `<number>`\n function handleParentTypeAssertionExpr(\n this: TypeAnalyzer,\n parent: ts.TypeAssertion,",
"score": 0.7863007187843323
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " return this.pushAnalyzedType(TYPE_KIND.ANGLE_BRACKETS_ASSERTION, [\n prevNode.end - 1,\n nextNode.pos + 1\n ]);\n }\n // context = `a as number` | `a satisfies number`, curChild = `number`\n function handleParentAsOrSatisfiesExpr(\n this: TypeAnalyzer,\n parent: ts.AsExpression | ts.SatisfiesExpression,\n curChild: ts.Node",
"score": 0.7828099727630615
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " }\n return this.pushAnalyzedType(TYPE_KIND.FUNCTION_OVERLOAD, [\n startPos,\n parent.end\n ]);\n } else {\n return this.pushAnalyzedType(TYPE_KIND.FUNCTION_OVERLOAD, [\n parent.pos,\n parent.end\n ]);",
"score": 0.7539591789245605
}
] | typescript | kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{ |
import chalk from 'chalk';
import { stdout as output } from 'node:process';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { PDFLoader } from 'langchain/document_loaders/fs/pdf';
import { DocxLoader } from 'langchain/document_loaders/fs/docx';
import { EPubLoader } from 'langchain/document_loaders/fs/epub';
import { CSVLoader } from 'langchain/document_loaders/fs/csv';
import ora from 'ora';
import { MarkdownTextSplitter, RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { Document } from 'langchain/document';
import path from 'path';
import { YoutubeTranscript } from 'youtube-transcript';
import getDirectoryListWithDetails from '../utils/getDirectoryListWithDetails.js';
import createDirectory from '../utils/createDirectory.js';
import { getConfig, getDefaultOraOptions, getProjectRoot, setCurrentVectorStoreDatabasePath } from '../config/index.js';
import getDirectoryFiles from '../utils/getDirectoryFiles.js';
import WebCrawler from './crawler.js';
const projectRootDir = getProjectRoot();
const defaultOraOptions = getDefaultOraOptions(output);
/**
* This function loads and splits a file based on its extension using different loaders and text
* splitters.
* @param {string} filePath - A string representing the path to the file that needs to be loaded and
* split into documents.
* @returns The function `loadAndSplitFile` returns a Promise that resolves to an array of `Document`
* objects, where each `Document` represents a split portion of the input file. The type of the
* `Document` object is `Document<Record<string, unknown>>`, which means it has a generic type
* parameter that is an object with string keys and unknown values.
*/
async function loadAndSplitFile(filePath: string): Promise<Document<Record<string, unknown>>[]> {
const fileExtension = path.extname(filePath);
let loader;
let documents: Document<Record<string, unknown>>[];
switch (fileExtension) {
case '.json':
loader = new JSONLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.txt':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.md':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new MarkdownTextSplitter());
break;
case '.pdf':
loader = new PDFLoader(filePath, { splitPages: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.docx':
loader = new DocxLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.csv':
loader = new CSVLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.epub':
loader = new EPubLoader(filePath, { splitChapters: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
default:
throw new Error(`Unsupported file extension: ${fileExtension}`);
}
return documents;
}
/**
* This function loads or creates a vector store using HNSWLib and OpenAIEmbeddings.
* @returns The function `loadOrCreateVectorStore` returns a Promise that resolves to an instance of
* the `HNSWLib` class, which is a vector store used for storing and searching high-dimensional
* vectors.
*/
async function loadOrCreateVectorStore(): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
| await createDirectory(getConfig().currentVectorStoreDatabasePath); |
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),
}).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const filesToAdd = await getDirectoryFiles(docsDirectory);
const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath)));
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
vectorStore = await HNSWLib.fromDocuments(flattenedDocuments, new OpenAIEmbeddings({ maxConcurrency: 5 }));
await vectorStore.save(dbDirectory);
spinner.succeed();
}
return vectorStore;
}
// Eagerly initialize the store at module load; it is held in a wrapper object
// so the active instance can be swapped later (see loadOrCreateEmptyVectorStore).
const contextVectorStore = await loadOrCreateVectorStore();
const contextWrapper = {
  contextInstance: contextVectorStore,
};
/**
 * Loads an existing Context Vector Store from the given sub-directory, or
 * creates a new, empty, unsaved one there. Either way, that store becomes the
 * currently selected store.
 * @param {string} subDirectory - Directory name (under the vector-store base
 * directory) that holds, or will hold, the store.
 * @returns A Promise resolving to the `HNSWLib` vector store instance.
 */
async function loadOrCreateEmptyVectorStore(subDirectory: string): Promise<HNSWLib> {
  let vectorStore: HNSWLib;
  let spinner;
  const newContextVectorStorePath = path.join(projectRootDir, process.env.VECTOR_STORE_BASE_DIR || 'db', subDirectory);
  await createDirectory(newContextVectorStorePath);
  // Make this store the active one before attempting to load it.
  setCurrentVectorStoreDatabasePath(newContextVectorStorePath);
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
    output.write(chalk.blue(`Using Context Vector Store in the ${dbDirectory} directory\n`));
  } catch {
    spinner = ora({
      ...defaultOraOptions,
      text: chalk.blue(`Creating new empty Context Vector Store in the ${dbDirectory} directory`),
    }).start();
    vectorStore = new HNSWLib(new OpenAIEmbeddings({ maxConcurrency: 5 }), {
      space: 'cosine',
      numDimensions: 1536,
    });
    spinner.succeed();
    output.write(
      chalk.red.bold(
        // Fixed typo in the user-facing message: "to is" -> "to it".
        `\nThe Context Vector Store is currently empty and unsaved, add context to it using \`/add-docs\`, \`/add-url\` or \`/add-youtube\``
      )
    );
  }
  contextWrapper.contextInstance = vectorStore;
  return vectorStore;
}
async function getContextVectorStore() {
return contextWrapper.contextInstance;
}
/**
 * Loads, splits and adds the given files to the Context Vector Store, then
 * persists the store to disk.
 * @param {string[]} filePaths - File paths, relative to the docs directory, of
 * the documents to add to the Context Vector Store.
 * @returns nothing (`undefined`); failures are reported on the spinner/stdout.
 */
async function addDocument(filePaths: string[]) {
  let spinner;
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    spinner = ora({ ...defaultOraOptions, text: `Adding files to the Context Vector Store` }).start();
    const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
    const documents = await Promise.all(
      filePaths.map((filePath) => loadAndSplitFile(path.join(docsDirectory, filePath)))
    );
    // Flatten the per-file arrays of documents (same idiom as addURL).
    const flattenedDocuments = documents.flat();
    const vectorStore = await getContextVectorStore();
    await vectorStore.addDocuments(flattenedDocuments);
    await vectorStore.save(dbDirectory);
    spinner.succeed();
    return;
  } catch (error) {
    // The spinner only exists if ora().start() succeeded; fall back to stdout.
    if (spinner) {
      spinner.fail(chalk.red(error));
    } else {
      output.write(chalk.red(error));
    }
  }
}
/**
 * Fetches a YouTube video's transcript, splits it into documents, adds them to
 * the Context Vector Store and persists the store.
 * @param {string} URLOrVideoID - A YouTube video URL or a bare video ID.
 * @returns nothing (`undefined`); failures are reported on the spinner/stdout.
 */
async function addYouTube(URLOrVideoID: string) {
  let progressSpinner;
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    progressSpinner = ora({
      ...defaultOraOptions,
      text: `Adding Video transcript from ${URLOrVideoID} to the Context Vector Store`,
    }).start();
    // Join all transcript fragments into a single block of text before splitting.
    const transcriptParts = await YoutubeTranscript.fetchTranscript(URLOrVideoID);
    const fullText = transcriptParts.map((part) => part.text).join(' ');
    const videoDocs = await new RecursiveCharacterTextSplitter().splitDocuments([
      new Document({
        pageContent: fullText,
      }),
    ]);
    const vectorStore = await getContextVectorStore();
    await vectorStore.addDocuments(videoDocs);
    await vectorStore.save(dbDirectory);
    progressSpinner.succeed();
    return;
  } catch (error) {
    if (progressSpinner) {
      progressSpinner.fail(chalk.red(error));
    } else {
      output.write(chalk.red(error));
    }
  }
}
/**
 * Crawls a URL, extracts the matched elements' text from each page, splits the
 * text into documents, generates embeddings for them and saves them to the
 * Context Vector Store.
 * @param {string} URL - The URL of the website to crawl and extract text from.
 * @param {string} selector - CSS selector identifying the elements to extract
 * on each crawled page.
 * @param {number} maxPages - Maximum number of pages to crawl.
 * @param {number} numberOfCharactersRequired - Minimum number of characters a
 * page must yield to be kept; shorter pages are discarded by the crawler.
 * @returns nothing (`undefined`); failures are reported on the spinners.
 */
async function addURL(URL: string, selector: string, maxPages: number, numberOfCharactersRequired: number) {
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  const addUrlSpinner = ora({ ...defaultOraOptions, text: `Crawling ${URL}` });
  let documents;
  try {
    addUrlSpinner.start();
    const progressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => {
      addUrlSpinner.text = `Links found: ${linksFound} - Links crawled: ${linksCrawled} - Crawling ${currentUrl}`;
    };
    const crawler = new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired);
    // NOTE(review): the original cast was `as Page[]`, but no `Page` type is
    // imported in this file; a structural type keeps the same runtime behavior
    // without the unresolved name — confirm no ambient `Page` was intended.
    const pages = (await crawler.start()) as Array<{ text: string }>;
    // The splitter is loop-invariant, so create it once rather than per page.
    const splitter = new RecursiveCharacterTextSplitter();
    documents = await Promise.all(
      pages.map((row) =>
        splitter.splitDocuments([
          new Document({
            pageContent: row.text,
          }),
        ])
      )
    );
    addUrlSpinner.succeed();
  } catch (error) {
    addUrlSpinner.fail(chalk.red(error));
  }
  if (documents) {
    const generateEmbeddingsSpinner = ora({ ...defaultOraOptions, text: `Generating Embeddings` });
    try {
      const flattenedDocuments = documents.flat();
      generateEmbeddingsSpinner.text = `Generating Embeddings for ${flattenedDocuments.length} documents`;
      generateEmbeddingsSpinner.start();
      const vectorStore = await getContextVectorStore();
      await vectorStore.addDocuments(flattenedDocuments);
      await vectorStore.save(dbDirectory);
      generateEmbeddingsSpinner.succeed();
      return;
    } catch (error) {
      generateEmbeddingsSpinner.fail(chalk.red(error));
    }
  }
}
async function listContextStores() {
const projectRoot = getProjectRoot(); // Please replace this with your actual function to get the project root
const vectorStoreDir = process.env.VECTOR_STORE_BASE_DIR || 'db';
const targetDir = path.join(projectRoot, vectorStoreDir);
const contextVectorStoresList = await getDirectoryListWithDetails(targetDir);
output.write(chalk.blue(`Context Vector Stores in ${targetDir}:\n\n`));
Object.entries(contextVectorStoresList).forEach(([dir, files]) => {
output.write(chalk.yellow(`Directory: ${dir}`));
if (dir === getConfig().currentVectorStoreDatabasePath) {
output.write(chalk.green(` (Currently selected)`));
}
output.write('\n');
files.forEach((file) => {
output.write(chalk.yellow(` File: ${file.name}, Size: ${file.size} KB\n`));
});
});
}
// Public API of the context-store module.
export { getContextVectorStore, addDocument, addURL, addYouTube, listContextStores, loadOrCreateEmptyVectorStore };
| src/lib/contextManager.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/lib/vectorStoreUtils.ts",
"retrieved_chunk": "import { HNSWLib } from 'langchain/vectorstores/hnswlib';\n/**\n * Retrieves relevant context for the given question by performing a similarity search on the provided vector store.\n * @param {HNSWLib} vectorStore - HNSWLib is a library for approximate nearest neighbor search, used to\n * search for similar vectors in a high-dimensional space.\n * @param {string} sanitizedQuestion - The sanitized version of the question that needs to be answered.\n * It is a string input.\n * @param {number} numDocuments - The `numDocuments` parameter is the number of documents that the\n * `getRelevantContext` function should retrieve from the `vectorStore` based on their similarity to\n * the `sanitizedQuestion`.",
"score": 0.8544936776161194
},
{
"filename": "src/lib/vectorStoreUtils.ts",
"retrieved_chunk": " * @returns The function `getRelevantContext` is returning a Promise that resolves to a string. The\n * string is the concatenation of the `pageContent` property of the top `numDocuments` documents\n * returned by a similarity search performed on a `vectorStore` using the `sanitizedQuestion` as the\n * query. The resulting string is trimmed and all newline characters are replaced with spaces.\n */\nasync function getRelevantContext(\n vectorStore: HNSWLib,\n sanitizedQuestion: string,\n numDocuments: number\n): Promise<string> {",
"score": 0.8344928026199341
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": " }\n return chalk.red(`All files in the memory directory have been deleted: ${error}`);\n }\n}\nasync function resetMemoryVectorStore(onReset: (newMemoryVectorStore: HNSWLib) => void) {\n const newMemoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });\n await deleteMemoryDirectory();",
"score": 0.7944386005401611
},
{
"filename": "src/commands/command.ts",
"retrieved_chunk": "/**\n * The function creates a command object with a name, aliases, description, and an execute function\n * that returns a Promise.\n * @param {string} name - A string representing the name of the command.\n * @param {string[]} aliases - An array of alternative names that can be used to call the command. For\n * example, if the command is named \"help\", aliases could include \"h\" or \"info\".\n * @param {string} description - A brief description of what the command does.\n * @param execute - The `execute` parameter is a function that takes in three arguments:\n * @returns A `Command` object is being returned.\n */",
"score": 0.7783880233764648
},
{
"filename": "src/utils/resolveURL.ts",
"retrieved_chunk": "/**\n * The function resolves a URL from a given base URL and returns the resolved URL as a string.\n * @param {string} from - The `from` parameter is a string representing the base URL that the `to`\n * parameter will be resolved against. It can be an absolute or relative URL.\n * @param {string} to - The `to` parameter is a string representing the URL that needs to be resolved.\n * It can be an absolute URL or a relative URL.\n * @returns The function `resolve` returns a string that represents the resolved URL. If the `to`\n * parameter is a relative URL, the function returns a string that represents the resolved URL relative\n * to the `from` parameter. If the `to` parameter is an absolute URL, the function returns a string\n * that represents the resolved URL.",
"score": 0.7612500190734863
}
] | typescript | await createDirectory(getConfig().currentVectorStoreDatabasePath); |
import chalk from 'chalk';
import { stdout as output } from 'node:process';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { PDFLoader } from 'langchain/document_loaders/fs/pdf';
import { DocxLoader } from 'langchain/document_loaders/fs/docx';
import { EPubLoader } from 'langchain/document_loaders/fs/epub';
import { CSVLoader } from 'langchain/document_loaders/fs/csv';
import ora from 'ora';
import { MarkdownTextSplitter, RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { Document } from 'langchain/document';
import path from 'path';
import { YoutubeTranscript } from 'youtube-transcript';
import getDirectoryListWithDetails from '../utils/getDirectoryListWithDetails.js';
import createDirectory from '../utils/createDirectory.js';
import { getConfig, getDefaultOraOptions, getProjectRoot, setCurrentVectorStoreDatabasePath } from '../config/index.js';
import getDirectoryFiles from '../utils/getDirectoryFiles.js';
import WebCrawler from './crawler.js';
const projectRootDir = getProjectRoot();
const defaultOraOptions = getDefaultOraOptions(output);
/**
* This function loads and splits a file based on its extension using different loaders and text
* splitters.
* @param {string} filePath - A string representing the path to the file that needs to be loaded and
* split into documents.
* @returns The function `loadAndSplitFile` returns a Promise that resolves to an array of `Document`
* objects, where each `Document` represents a split portion of the input file. The type of the
* `Document` object is `Document<Record<string, unknown>>`, which means it has a generic type
* parameter that is an object with string keys and unknown values.
*/
async function loadAndSplitFile(filePath: string): Promise<Document<Record<string, unknown>>[]> {
const fileExtension = path.extname(filePath);
let loader;
let documents: Document<Record<string, unknown>>[];
switch (fileExtension) {
case '.json':
loader = new JSONLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.txt':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.md':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new MarkdownTextSplitter());
break;
case '.pdf':
loader = new PDFLoader(filePath, { splitPages: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.docx':
loader = new DocxLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.csv':
loader = new CSVLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.epub':
loader = new EPubLoader(filePath, { splitChapters: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
default:
throw new Error(`Unsupported file extension: ${fileExtension}`);
}
return documents;
}
/**
 * Loads the Context Vector Store from disk, or — when none exists yet — builds
 * one from every document found in the docs directory and saves it.
 * @returns A Promise resolving to the `HNSWLib` vector store instance.
 */
async function loadOrCreateVectorStore(): Promise<HNSWLib> {
  let vectorStore: HNSWLib;
  // Read the configured directory once instead of calling getConfig() twice.
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  await createDirectory(dbDirectory);
  try {
    vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
  } catch {
    // No saved store (or it failed to load): build a fresh one from the docs.
    const spinner = ora({
      ...defaultOraOptions,
      text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),
    }).start();
    try {
      const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
      const filesToAdd = await getDirectoryFiles(docsDirectory);
      const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath)));
      // Flatten the per-file arrays of documents (same idiom as addURL).
      const flattenedDocuments = documents.flat();
      vectorStore = await HNSWLib.fromDocuments(flattenedDocuments, new OpenAIEmbeddings({ maxConcurrency: 5 }));
      await vectorStore.save(dbDirectory);
      spinner.succeed();
    } catch (error) {
      // Stop the spinner before propagating so the terminal isn't left spinning.
      spinner.fail(chalk.red(error));
      throw error;
    }
  }
  return vectorStore;
}
// Eagerly initialize the store at module load; it is held in a wrapper object
// so the active instance can be swapped later (see loadOrCreateEmptyVectorStore).
const contextVectorStore = await loadOrCreateVectorStore();
const contextWrapper = {
  contextInstance: contextVectorStore,
};
/**
 * Loads an existing Context Vector Store from the given sub-directory, or
 * creates a new, empty, unsaved one there. Either way, that store becomes the
 * currently selected store.
 * @param {string} subDirectory - Directory name (under the vector-store base
 * directory) that holds, or will hold, the store.
 * @returns A Promise resolving to the `HNSWLib` vector store instance.
 */
async function loadOrCreateEmptyVectorStore(subDirectory: string): Promise<HNSWLib> {
  let vectorStore: HNSWLib;
  let spinner;
  const newContextVectorStorePath = path.join(projectRootDir, process.env.VECTOR_STORE_BASE_DIR || 'db', subDirectory);
  await createDirectory(newContextVectorStorePath);
  // Make this store the active one before attempting to load it.
  setCurrentVectorStoreDatabasePath(newContextVectorStorePath);
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
    output.write(chalk.blue(`Using Context Vector Store in the ${dbDirectory} directory\n`));
  } catch {
    spinner = ora({
      ...defaultOraOptions,
      text: chalk.blue(`Creating new empty Context Vector Store in the ${dbDirectory} directory`),
    }).start();
    vectorStore = new HNSWLib(new OpenAIEmbeddings({ maxConcurrency: 5 }), {
      space: 'cosine',
      numDimensions: 1536,
    });
    spinner.succeed();
    output.write(
      chalk.red.bold(
        // Fixed typo in the user-facing message: "to is" -> "to it".
        `\nThe Context Vector Store is currently empty and unsaved, add context to it using \`/add-docs\`, \`/add-url\` or \`/add-youtube\``
      )
    );
  }
  contextWrapper.contextInstance = vectorStore;
  return vectorStore;
}
/**
 * Returns the currently active Context Vector Store instance.
 */
async function getContextVectorStore() {
  const { contextInstance } = contextWrapper;
  return contextInstance;
}
/**
 * Loads the given files from the docs directory, adds them to the Context
 * Vector Store and saves the updated index to disk.
 * @param {string[]} filePaths - File paths, relative to the docs directory,
 * of the documents to add to the Context Vector Store.
 * @returns nothing (`undefined`).
 */
async function addDocument(filePaths: string[]) {
  let spinner;
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    spinner = ora({ ...defaultOraOptions, text: `Adding files to the Context Vector Store` }).start();
    const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
    const documents = await Promise.all(
      filePaths.map((filePath) => loadAndSplitFile(path.join(docsDirectory, filePath)))
    );
    // flat() replaces the manual reduce/concat and matches the style used in addURL.
    const flattenedDocuments = documents.flat();
    const vectorStore = await getContextVectorStore();
    await vectorStore.addDocuments(flattenedDocuments);
    await vectorStore.save(dbDirectory);
    spinner.succeed();
    return;
  } catch (error) {
    // Report through the spinner when it was started, otherwise write directly.
    if (spinner) {
      spinner.fail(chalk.red(error));
    } else {
      output.write(chalk.red(error));
    }
  }
}
/**
 * Fetches the transcript of a YouTube video, splits it into documents and
 * adds them to the Context Vector Store, saving the updated index to disk.
 * @param {string} URLOrVideoID - The URL or the video ID of a YouTube video.
 * @returns undefined once the transcript has been added (or an error reported).
 */
async function addYouTube(URLOrVideoID: string) {
  let spinner;
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    spinner = ora({
      ...defaultOraOptions,
      text: `Adding Video transcript from ${URLOrVideoID} to the Context Vector Store`,
    }).start();
    const transcriptParts = await YoutubeTranscript.fetchTranscript(URLOrVideoID);
    const transcriptText = transcriptParts.map((part) => part.text).join(' ');
    const videoDocs = await new RecursiveCharacterTextSplitter().splitDocuments([
      new Document({ pageContent: transcriptText }),
    ]);
    const store = await getContextVectorStore();
    await store.addDocuments(videoDocs);
    await store.save(dbDirectory);
    spinner.succeed();
    return;
  } catch (error) {
    if (spinner) {
      spinner.fail(chalk.red(error));
    } else {
      output.write(chalk.red(error));
    }
  }
}
/**
* The function crawls a given URL, extracts text from the pages, splits the text into documents,
* generates embeddings for the documents, and saves them to a vector store.
* @param {string} URL - The URL of the website to crawl and extract text from.
* @param {string} selector - The selector parameter is a string that represents a CSS selector used to
* identify the HTML elements to be crawled on the web page. The WebCrawler will only crawl the
* elements that match the selector.
* @param {number} maxPages - The maximum number of pages to crawl for the given URL.
* @param {number} numberOfCharactersRequired - `numberOfCharactersRequired` is a number that specifies
* the minimum number of characters required for a document to be considered valid and used for
* generating embeddings. Any document with less than this number of characters will be discarded.
* @returns Nothing is being returned explicitly in the function, but it is implied that the function
* will return undefined if there are no errors.
*/
async function addURL(URL: string, selector: string, maxPages: number, numberOfCharactersRequired: number) {
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
const addUrlSpinner = ora({ ...defaultOraOptions, text: `Crawling ${URL}` });
let documents;
try {
addUrlSpinner.start();
const progressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => {
addUrlSpinner.text = `Links found: ${linksFound} - Links crawled: ${linksCrawled} - Crawling ${currentUrl}`;
};
const crawler = | new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired); |
const pages = (await crawler.start()) as Page[];
documents = await Promise.all(
pages.map((row) => {
const splitter = new RecursiveCharacterTextSplitter();
const webDocs = splitter.splitDocuments([
new Document({
pageContent: row.text,
}),
]);
return webDocs;
})
);
addUrlSpinner.succeed();
} catch (error) {
addUrlSpinner.fail(chalk.red(error));
}
if (documents) {
const generateEmbeddingsSpinner = ora({ ...defaultOraOptions, text: `Generating Embeddings` });
try {
const flattenedDocuments = documents.flat();
generateEmbeddingsSpinner.text = `Generating Embeddings for ${flattenedDocuments.length} documents`;
generateEmbeddingsSpinner.start();
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
generateEmbeddingsSpinner.succeed();
return;
} catch (error) {
generateEmbeddingsSpinner.fail(chalk.red(error));
}
}
}
async function listContextStores() {
const projectRoot = getProjectRoot(); // Please replace this with your actual function to get the project root
const vectorStoreDir = process.env.VECTOR_STORE_BASE_DIR || 'db';
const targetDir = path.join(projectRoot, vectorStoreDir);
const contextVectorStoresList = await getDirectoryListWithDetails(targetDir);
output.write(chalk.blue(`Context Vector Stores in ${targetDir}:\n\n`));
Object.entries(contextVectorStoresList).forEach(([dir, files]) => {
output.write(chalk.yellow(`Directory: ${dir}`));
if (dir === getConfig().currentVectorStoreDatabasePath) {
output.write(chalk.green(` (Currently selected)`));
}
output.write('\n');
files.forEach((file) => {
output.write(chalk.yellow(` File: ${file.name}, Size: ${file.size} KB\n`));
});
});
}
export { getContextVectorStore, addDocument, addURL, addYouTube, listContextStores, loadOrCreateEmptyVectorStore };
| src/lib/contextManager.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/lib/crawler.ts",
"retrieved_chunk": " this.urls = urls;\n this.selector = selector;\n this.limit = limit;\n this.textLengthMinimum = textLengthMinimum;\n this.progressCallback = progressCallback;\n this.count = 0;\n this.pages = [];\n this.crawler = new Crawler({\n maxConnections: 10,\n callback: this.handleRequest,",
"score": 0.7631409168243408
},
{
"filename": "src/commands/addURLCommand.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport createCommand from './command.js';\nimport { addURL } from '../lib/contextManager.js';\nconst addURLCommand = createCommand(\n 'add-url',\n ['url'],\n `Scrapes the content from a url and adds it to the context vector store.\\n\n Arguments: \\`url\\`, \\`selector to extract\\` (Default: body), \\`Maximum number of links to follow\\` (Default: 20), \\`Ignore pages with less than n characters\\` (Default: 200)\\n\n Example: /add-url https://dociq.io main 10 500\\n\n This operation may try to generate a large number of embeddings depending on the structure of the web pages and may lead to rate-limiting.\\n",
"score": 0.7530304193496704
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": " documents: Array<{ content: string; metadataType: string }>\n): Promise<void> {\n const formattedDocuments = documents.map(\n (doc) => new Document({ pageContent: doc.content, metadata: { type: doc.metadataType } })\n );\n await memoryWrapper.vectorStoreInstance.addDocuments(formattedDocuments);\n await saveMemoryVectorStore();\n}\nfunction resetBufferWindowMemory() {\n bufferWindowMemory.clear();",
"score": 0.7486959099769592
},
{
"filename": "src/lib/crawler.ts",
"retrieved_chunk": " const text = $(this.selector).text();\n // const text = turndownService.turndown(html || '');\n const page: Page = {\n url: res.request.uri.href,\n text,\n title,\n };\n if (text.length > this.textLengthMinimum) {\n this.pages.push(page);\n this.progressCallback(this.count + 1, this.pages.length, res.request.uri.href);",
"score": 0.7471909523010254
},
{
"filename": "src/lib/crawler.ts",
"retrieved_chunk": " title: string;\n}\n/* The WebCrawler class is a TypeScript implementation of a web crawler that can extract text from web\npages and follow links to crawl more pages. */\nclass WebCrawler {\n pages: Page[];\n limit: number;\n urls: string[];\n count: number;\n textLengthMinimum: number;",
"score": 0.7368782758712769
}
] | typescript | new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired); |
import chalk from 'chalk';
import { stdout as output } from 'node:process';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { PDFLoader } from 'langchain/document_loaders/fs/pdf';
import { DocxLoader } from 'langchain/document_loaders/fs/docx';
import { EPubLoader } from 'langchain/document_loaders/fs/epub';
import { CSVLoader } from 'langchain/document_loaders/fs/csv';
import ora from 'ora';
import { MarkdownTextSplitter, RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { Document } from 'langchain/document';
import path from 'path';
import { YoutubeTranscript } from 'youtube-transcript';
import getDirectoryListWithDetails from '../utils/getDirectoryListWithDetails.js';
import createDirectory from '../utils/createDirectory.js';
import { getConfig, getDefaultOraOptions, getProjectRoot, setCurrentVectorStoreDatabasePath } from '../config/index.js';
import getDirectoryFiles from '../utils/getDirectoryFiles.js';
import WebCrawler from './crawler.js';
const projectRootDir = getProjectRoot();
const defaultOraOptions = getDefaultOraOptions(output);
/**
 * Loads a file and splits it into documents using a loader and text splitter
 * chosen by the file's extension.
 * @param {string} filePath - Path to the file to load and split.
 * @returns A Promise resolving to an array of `Document<Record<string, unknown>>`
 * objects, one per split portion of the input file.
 * @throws Error when the file extension is not supported.
 */
async function loadAndSplitFile(filePath: string): Promise<Document<Record<string, unknown>>[]> {
  // Lowercase the extension so files like `REPORT.PDF` are still recognized.
  const fileExtension = path.extname(filePath).toLowerCase();
  let loader;
  let documents: Document<Record<string, unknown>>[];
  switch (fileExtension) {
    case '.json':
      loader = new JSONLoader(filePath);
      documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
      break;
    case '.txt':
      loader = new TextLoader(filePath);
      documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
      break;
    case '.md':
      // Markdown gets a Markdown-aware splitter so headings stay intact.
      loader = new TextLoader(filePath);
      documents = await loader.loadAndSplit(new MarkdownTextSplitter());
      break;
    case '.pdf':
      loader = new PDFLoader(filePath, { splitPages: false });
      documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
      break;
    case '.docx':
      loader = new DocxLoader(filePath);
      documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
      break;
    case '.csv':
      loader = new CSVLoader(filePath);
      documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
      break;
    case '.epub':
      loader = new EPubLoader(filePath, { splitChapters: false });
      documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
      break;
    default:
      throw new Error(`Unsupported file extension: ${fileExtension}`);
  }
  return documents;
}
/**
* This function loads or creates a vector store using HNSWLib and OpenAIEmbeddings.
* @returns The function `loadOrCreateVectorStore` returns a Promise that resolves to an instance of
* the `HNSWLib` class, which is a vector store used for storing and searching high-dimensional
* vectors.
*/
async function loadOrCreateVectorStore(): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
await createDirectory(getConfig().currentVectorStoreDatabasePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),
}).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
| const filesToAdd = await getDirectoryFiles(docsDirectory); |
const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath)));
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
vectorStore = await HNSWLib.fromDocuments(flattenedDocuments, new OpenAIEmbeddings({ maxConcurrency: 5 }));
await vectorStore.save(dbDirectory);
spinner.succeed();
}
return vectorStore;
}
// Initialize the module-level Context Vector Store as soon as the module loads
// (top-level await; module evaluation blocks until the store is ready).
const contextVectorStore = await loadOrCreateVectorStore();
// Wrapper object so the active store instance can be swapped at runtime
// (see loadOrCreateEmptyVectorStore) while callers keep one stable reference.
const contextWrapper = {
  contextInstance: contextVectorStore,
};
/**
 * Loads an existing Context Vector Store from the given sub-directory, or
 * creates a new empty in-memory one there when no saved index exists.
 * Updates the module-level wrapper so subsequent calls to
 * `getContextVectorStore` return the new store.
 * @param {string} subDirectory - Sub-directory (under the vector store base
 * directory) to load from or create in.
 * @returns a Promise that resolves to the loaded or newly created HNSWLib
 * vector store instance.
 */
async function loadOrCreateEmptyVectorStore(subDirectory: string): Promise<HNSWLib> {
  let vectorStore: HNSWLib;
  let spinner;
  const newContextVectorStorePath = path.join(projectRootDir, process.env.VECTOR_STORE_BASE_DIR || 'db', subDirectory);
  await createDirectory(newContextVectorStorePath);
  setCurrentVectorStoreDatabasePath(newContextVectorStorePath);
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
    output.write(chalk.blue(`Using Context Vector Store in the ${dbDirectory} directory\n`))
  } catch {
    spinner = ora({
      ...defaultOraOptions,
      text: chalk.blue(`Creating new empty Context Vector Store in the ${dbDirectory} directory`),
    }).start();
    // 1536 dimensions matches OpenAI embedding vectors.
    vectorStore = new HNSWLib(new OpenAIEmbeddings({ maxConcurrency: 5 }), {
      space: 'cosine',
      numDimensions: 1536,
    });
    spinner.succeed();
    output.write(
      chalk.red.bold(
        // Fixed typo: "add context to is using" -> "add context to it using".
        `\nThe Context Vector Store is currently empty and unsaved, add context to it using \`/add-docs\`, \`/add-url\` or \`/add-youtube\``
      )
    );
  }
  contextWrapper.contextInstance = vectorStore;
  return vectorStore;
}
/**
 * Returns the currently active Context Vector Store instance.
 */
async function getContextVectorStore() {
  const { contextInstance } = contextWrapper;
  return contextInstance;
}
/**
 * Loads the given files from the docs directory, adds them to the Context
 * Vector Store and saves the updated index to disk.
 * @param {string[]} filePaths - File paths, relative to the docs directory,
 * of the documents to add to the Context Vector Store.
 * @returns nothing (`undefined`).
 */
async function addDocument(filePaths: string[]) {
  let spinner;
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    spinner = ora({ ...defaultOraOptions, text: `Adding files to the Context Vector Store` }).start();
    const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
    const documents = await Promise.all(
      filePaths.map((filePath) => loadAndSplitFile(path.join(docsDirectory, filePath)))
    );
    // flat() replaces the manual reduce/concat and matches the style used in addURL.
    const flattenedDocuments = documents.flat();
    const vectorStore = await getContextVectorStore();
    await vectorStore.addDocuments(flattenedDocuments);
    await vectorStore.save(dbDirectory);
    spinner.succeed();
    return;
  } catch (error) {
    // Report through the spinner when it was started, otherwise write directly.
    if (spinner) {
      spinner.fail(chalk.red(error));
    } else {
      output.write(chalk.red(error));
    }
  }
}
/**
 * Fetches the transcript of a YouTube video, splits it into documents and
 * adds them to the Context Vector Store, saving the updated index to disk.
 * @param {string} URLOrVideoID - The URL or the video ID of a YouTube video.
 * @returns undefined once the transcript has been added (or an error reported).
 */
async function addYouTube(URLOrVideoID: string) {
  let spinner;
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    spinner = ora({
      ...defaultOraOptions,
      text: `Adding Video transcript from ${URLOrVideoID} to the Context Vector Store`,
    }).start();
    const transcriptParts = await YoutubeTranscript.fetchTranscript(URLOrVideoID);
    const transcriptText = transcriptParts.map((part) => part.text).join(' ');
    const videoDocs = await new RecursiveCharacterTextSplitter().splitDocuments([
      new Document({ pageContent: transcriptText }),
    ]);
    const store = await getContextVectorStore();
    await store.addDocuments(videoDocs);
    await store.save(dbDirectory);
    spinner.succeed();
    return;
  } catch (error) {
    if (spinner) {
      spinner.fail(chalk.red(error));
    } else {
      output.write(chalk.red(error));
    }
  }
}
/**
 * Crawls a website starting at the given URL, splits the scraped page text
 * into documents, generates embeddings for them and saves the result to the
 * Context Vector Store.
 * @param {string} URL - Starting URL to crawl and extract text from.
 * @param {string} selector - CSS selector whose text content is extracted
 * from each crawled page.
 * @param {number} maxPages - Maximum number of pages to crawl.
 * @param {number} numberOfCharactersRequired - Minimum number of characters a
 * page must contain to be kept; shorter pages are discarded by the crawler.
 * @returns undefined once the documents have been saved (or an error reported).
 */
async function addURL(URL: string, selector: string, maxPages: number, numberOfCharactersRequired: number) {
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  const addUrlSpinner = ora({ ...defaultOraOptions, text: `Crawling ${URL}` });
  let documents;
  try {
    addUrlSpinner.start();
    const progressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => {
      addUrlSpinner.text = `Links found: ${linksFound} - Links crawled: ${linksCrawled} - Crawling ${currentUrl}`;
    };
    const crawler = new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired);
    const pages = (await crawler.start()) as Page[];
    // Split every crawled page into documents in parallel.
    documents = await Promise.all(
      pages.map((page) =>
        new RecursiveCharacterTextSplitter().splitDocuments([new Document({ pageContent: page.text })])
      )
    );
    addUrlSpinner.succeed();
  } catch (error) {
    addUrlSpinner.fail(chalk.red(error));
  }
  // Only generate embeddings when the crawl phase produced documents.
  if (documents) {
    const generateEmbeddingsSpinner = ora({ ...defaultOraOptions, text: `Generating Embeddings` });
    try {
      const flattenedDocuments = documents.flat();
      generateEmbeddingsSpinner.text = `Generating Embeddings for ${flattenedDocuments.length} documents`;
      generateEmbeddingsSpinner.start();
      const store = await getContextVectorStore();
      await store.addDocuments(flattenedDocuments);
      await store.save(dbDirectory);
      generateEmbeddingsSpinner.succeed();
      return;
    } catch (error) {
      generateEmbeddingsSpinner.fail(chalk.red(error));
    }
  }
}
async function listContextStores() {
const projectRoot = getProjectRoot(); // Please replace this with your actual function to get the project root
const vectorStoreDir = process.env.VECTOR_STORE_BASE_DIR || 'db';
const targetDir = path.join(projectRoot, vectorStoreDir);
const contextVectorStoresList = await getDirectoryListWithDetails(targetDir);
output.write(chalk.blue(`Context Vector Stores in ${targetDir}:\n\n`));
Object.entries(contextVectorStoresList).forEach(([dir, files]) => {
output.write(chalk.yellow(`Directory: ${dir}`));
if (dir === getConfig().currentVectorStoreDatabasePath) {
output.write(chalk.green(` (Currently selected)`));
}
output.write('\n');
files.forEach((file) => {
output.write(chalk.yellow(` File: ${file.name}, Size: ${file.size} KB\n`));
});
});
}
export { getContextVectorStore, addDocument, addURL, addYouTube, listContextStores, loadOrCreateEmptyVectorStore };
| src/lib/contextManager.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "const memoryDirectory = path.join(projectRootDir, process.env.MEMORY_VECTOR_STORE_DIR || 'memory');\nlet memoryVectorStore: HNSWLib;\ntry {\n memoryVectorStore = await HNSWLib.load(memoryDirectory, new OpenAIEmbeddings());\n} catch {\n output.write(`${chalk.blue(`Creating a new memory vector store index in the ${memoryDirectory} directory`)}\\n`);\n memoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });",
"score": 0.8621474504470825
},
{
"filename": "src/config/index.ts",
"retrieved_chunk": " return {\n text: 'Loading',\n stream: output,\n discardStdin: false,\n };\n}\nconst defaultConfig: Config = {\n currentVectorStoreDatabasePath: path.join(getProjectRoot(), process.env.VECTOR_STORE_DIR || 'db/default'),\n numContextDocumentsToRetrieve: 6,\n numMemoryDocumentsToRetrieve: 4,",
"score": 0.8352575302124023
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": " }\n return chalk.red(`All files in the memory directory have been deleted: ${error}`);\n }\n}\nasync function resetMemoryVectorStore(onReset: (newMemoryVectorStore: HNSWLib) => void) {\n const newMemoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });\n await deleteMemoryDirectory();",
"score": 0.8178820610046387
},
{
"filename": "src/index.ts",
"retrieved_chunk": " output.write(chalk.green('\\nStart chatting or type /help for a list of commands\\n'));\n const userInput = await rl.question('> ');\n let response;\n if (userInput.startsWith('/')) {\n const [command, ...args] = userInput.slice(1).split(' ');\n await commandHandler.execute(command, args, output);\n } else {\n const memoryVectorStore = await getMemoryVectorStore();\n const contextVectorStore = await getContextVectorStore();\n const question = sanitizeInput(userInput);",
"score": 0.784040093421936
},
{
"filename": "src/index.ts",
"retrieved_chunk": "import { getRelevantContext } from './lib/vectorStoreUtils.js';\nimport sanitizeInput from './utils/sanitizeInput.js';\nimport { getConfig, getProjectRoot } from './config/index.js';\nconst projectRootDir = getProjectRoot();\ndotenv.config();\n// Set up the chat log directory\nconst chatLogDirectory = path.join(projectRootDir, 'chat_logs');\n// Get the prompt template\nconst systemPromptTemplate = fs.readFileSync(path.join(projectRootDir, 'src/prompt.txt'), 'utf8');\n// Set up the readline interface to read input from the user and write output to the console",
"score": 0.7820063829421997
}
] | typescript | const filesToAdd = await getDirectoryFiles(docsDirectory); |
import chalk from 'chalk';
import { stdout as output } from 'node:process';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { PDFLoader } from 'langchain/document_loaders/fs/pdf';
import { DocxLoader } from 'langchain/document_loaders/fs/docx';
import { EPubLoader } from 'langchain/document_loaders/fs/epub';
import { CSVLoader } from 'langchain/document_loaders/fs/csv';
import ora from 'ora';
import { MarkdownTextSplitter, RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { Document } from 'langchain/document';
import path from 'path';
import { YoutubeTranscript } from 'youtube-transcript';
import getDirectoryListWithDetails from '../utils/getDirectoryListWithDetails.js';
import createDirectory from '../utils/createDirectory.js';
import { getConfig, getDefaultOraOptions, getProjectRoot, setCurrentVectorStoreDatabasePath } from '../config/index.js';
import getDirectoryFiles from '../utils/getDirectoryFiles.js';
import WebCrawler from './crawler.js';
const projectRootDir = getProjectRoot();
const defaultOraOptions = getDefaultOraOptions(output);
/**
 * Loads a file and splits it into documents using a loader and text splitter
 * chosen by the file's extension.
 * @param {string} filePath - Path to the file to load and split.
 * @returns A Promise resolving to an array of `Document<Record<string, unknown>>`
 * objects, one per split portion of the input file.
 * @throws Error when the file extension is not supported.
 */
async function loadAndSplitFile(filePath: string): Promise<Document<Record<string, unknown>>[]> {
  // Lowercase the extension so files like `REPORT.PDF` are still recognized.
  const fileExtension = path.extname(filePath).toLowerCase();
  let loader;
  let documents: Document<Record<string, unknown>>[];
  switch (fileExtension) {
    case '.json':
      loader = new JSONLoader(filePath);
      documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
      break;
    case '.txt':
      loader = new TextLoader(filePath);
      documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
      break;
    case '.md':
      // Markdown gets a Markdown-aware splitter so headings stay intact.
      loader = new TextLoader(filePath);
      documents = await loader.loadAndSplit(new MarkdownTextSplitter());
      break;
    case '.pdf':
      loader = new PDFLoader(filePath, { splitPages: false });
      documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
      break;
    case '.docx':
      loader = new DocxLoader(filePath);
      documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
      break;
    case '.csv':
      loader = new CSVLoader(filePath);
      documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
      break;
    case '.epub':
      loader = new EPubLoader(filePath, { splitChapters: false });
      documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
      break;
    default:
      throw new Error(`Unsupported file extension: ${fileExtension}`);
  }
  return documents;
}
/**
* This function loads or creates a vector store using HNSWLib and OpenAIEmbeddings.
* @returns The function `loadOrCreateVectorStore` returns a Promise that resolves to an instance of
* the `HNSWLib` class, which is a vector store used for storing and searching high-dimensional
* vectors.
*/
async function loadOrCreateVectorStore(): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
await createDirectory(getConfig().currentVectorStoreDatabasePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),
}).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const filesToAdd = await getDirectoryFiles(docsDirectory);
| const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath))); |
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
vectorStore = await HNSWLib.fromDocuments(flattenedDocuments, new OpenAIEmbeddings({ maxConcurrency: 5 }));
await vectorStore.save(dbDirectory);
spinner.succeed();
}
return vectorStore;
}
// Initialize the module-level Context Vector Store as soon as the module loads
// (top-level await; module evaluation blocks until the store is ready).
const contextVectorStore = await loadOrCreateVectorStore();
// Wrapper object so the active store instance can be swapped at runtime
// (see loadOrCreateEmptyVectorStore) while callers keep one stable reference.
const contextWrapper = {
  contextInstance: contextVectorStore,
};
/**
 * Loads an existing Context Vector Store from the given sub-directory, or
 * creates a new empty in-memory one there when no saved index exists.
 * Updates the module-level wrapper so subsequent calls to
 * `getContextVectorStore` return the new store.
 * @param {string} subDirectory - Sub-directory (under the vector store base
 * directory) to load from or create in.
 * @returns a Promise that resolves to the loaded or newly created HNSWLib
 * vector store instance.
 */
async function loadOrCreateEmptyVectorStore(subDirectory: string): Promise<HNSWLib> {
  let vectorStore: HNSWLib;
  let spinner;
  const newContextVectorStorePath = path.join(projectRootDir, process.env.VECTOR_STORE_BASE_DIR || 'db', subDirectory);
  await createDirectory(newContextVectorStorePath);
  setCurrentVectorStoreDatabasePath(newContextVectorStorePath);
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
    output.write(chalk.blue(`Using Context Vector Store in the ${dbDirectory} directory\n`));
  } catch {
    spinner = ora({
      ...defaultOraOptions,
      text: chalk.blue(`Creating new empty Context Vector Store in the ${dbDirectory} directory`),
    }).start();
    // 1536 dimensions matches OpenAI embedding vectors.
    vectorStore = new HNSWLib(new OpenAIEmbeddings({ maxConcurrency: 5 }), {
      space: 'cosine',
      numDimensions: 1536,
    });
    spinner.succeed();
    output.write(
      chalk.red.bold(
        // Fixed typo: "add context to is using" -> "add context to it using".
        `\nThe Context Vector Store is currently empty and unsaved, add context to it using \`/add-docs\`, \`/add-url\` or \`/add-youtube\``
      )
    );
  }
  contextWrapper.contextInstance = vectorStore;
  return vectorStore;
}
/**
 * Returns the currently active Context Vector Store instance.
 */
async function getContextVectorStore() {
  const { contextInstance } = contextWrapper;
  return contextInstance;
}
/**
 * Loads the given files from the docs directory, adds them to the Context
 * Vector Store and saves the updated index to disk.
 * @param {string[]} filePaths - File paths, relative to the docs directory,
 * of the documents to add to the Context Vector Store.
 * @returns nothing (`undefined`).
 */
async function addDocument(filePaths: string[]) {
  let spinner;
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    spinner = ora({ ...defaultOraOptions, text: `Adding files to the Context Vector Store` }).start();
    const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
    const documents = await Promise.all(
      filePaths.map((filePath) => loadAndSplitFile(path.join(docsDirectory, filePath)))
    );
    // flat() replaces the manual reduce/concat and matches the style used in addURL.
    const flattenedDocuments = documents.flat();
    const vectorStore = await getContextVectorStore();
    await vectorStore.addDocuments(flattenedDocuments);
    await vectorStore.save(dbDirectory);
    spinner.succeed();
    return;
  } catch (error) {
    // Report through the spinner when it was started, otherwise write directly.
    if (spinner) {
      spinner.fail(chalk.red(error));
    } else {
      output.write(chalk.red(error));
    }
  }
}
/**
 * Fetches the transcript of a YouTube video, splits it into documents and
 * adds them to the Context Vector Store, saving the updated index to disk.
 * @param {string} URLOrVideoID - The URL or the video ID of a YouTube video.
 * @returns undefined once the transcript has been added (or an error reported).
 */
async function addYouTube(URLOrVideoID: string) {
  let spinner;
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    spinner = ora({
      ...defaultOraOptions,
      text: `Adding Video transcript from ${URLOrVideoID} to the Context Vector Store`,
    }).start();
    const transcriptParts = await YoutubeTranscript.fetchTranscript(URLOrVideoID);
    const transcriptText = transcriptParts.map((part) => part.text).join(' ');
    const videoDocs = await new RecursiveCharacterTextSplitter().splitDocuments([
      new Document({ pageContent: transcriptText }),
    ]);
    const store = await getContextVectorStore();
    await store.addDocuments(videoDocs);
    await store.save(dbDirectory);
    spinner.succeed();
    return;
  } catch (error) {
    if (spinner) {
      spinner.fail(chalk.red(error));
    } else {
      output.write(chalk.red(error));
    }
  }
}
/**
 * Crawls a website starting at the given URL, splits the extracted page text
 * into documents, generates embeddings for them and saves them to the
 * Context Vector Store.
 * @param {string} URL - The URL to start crawling from.
 * @param {string} selector - CSS selector identifying the elements whose text is extracted.
 * @param {number} maxPages - Maximum number of pages to crawl.
 * @param {number} numberOfCharactersRequired - Minimum number of characters a
 * page must contain to be kept; shorter pages are discarded.
 * @returns nothing (`undefined`); errors are reported on the spinners and swallowed.
 */
async function addURL(URL: string, selector: string, maxPages: number, numberOfCharactersRequired: number) {
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  const addUrlSpinner = ora({ ...defaultOraOptions, text: `Crawling ${URL}` });
  let documents;
  try {
    addUrlSpinner.start();
    const progressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => {
      addUrlSpinner.text = `Links found: ${linksFound} - Links crawled: ${linksCrawled} - Crawling ${currentUrl}`;
    };
    const crawler = new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired);
    // NOTE(review): the previous `as Page[]` cast referenced a `Page` type that is
    // not imported in this file; a structural type covering the only field used
    // (`text`) keeps runtime behavior identical. Confirm against WebCrawler's
    // actual return type.
    const pages = (await crawler.start()) as Array<{ text: string }>;
    // The splitter carries only configuration, so one instance is reused for
    // every page instead of constructing it per iteration — TODO confirm it is
    // stateless across splitDocuments calls.
    const splitter = new RecursiveCharacterTextSplitter();
    documents = await Promise.all(
      pages.map((row) =>
        splitter.splitDocuments([
          new Document({
            pageContent: row.text,
          }),
        ])
      )
    );
    addUrlSpinner.succeed();
  } catch (error) {
    addUrlSpinner.fail(chalk.red(error));
  }
  if (documents) {
    const generateEmbeddingsSpinner = ora({ ...defaultOraOptions, text: `Generating Embeddings` });
    try {
      const flattenedDocuments = documents.flat();
      generateEmbeddingsSpinner.text = `Generating Embeddings for ${flattenedDocuments.length} documents`;
      generateEmbeddingsSpinner.start();
      const vectorStore = await getContextVectorStore();
      await vectorStore.addDocuments(flattenedDocuments);
      await vectorStore.save(dbDirectory);
      generateEmbeddingsSpinner.succeed();
      return;
    } catch (error) {
      generateEmbeddingsSpinner.fail(chalk.red(error));
    }
  }
}
async function listContextStores() {
const projectRoot = getProjectRoot(); // Please replace this with your actual function to get the project root
const vectorStoreDir = process.env.VECTOR_STORE_BASE_DIR || 'db';
const targetDir = path.join(projectRoot, vectorStoreDir);
const contextVectorStoresList = await getDirectoryListWithDetails(targetDir);
output.write(chalk.blue(`Context Vector Stores in ${targetDir}:\n\n`));
Object.entries(contextVectorStoresList).forEach(([dir, files]) => {
output.write(chalk.yellow(`Directory: ${dir}`));
if (dir === getConfig().currentVectorStoreDatabasePath) {
output.write(chalk.green(` (Currently selected)`));
}
output.write('\n');
files.forEach((file) => {
output.write(chalk.yellow(` File: ${file.name}, Size: ${file.size} KB\n`));
});
});
}
export { getContextVectorStore, addDocument, addURL, addYouTube, listContextStores, loadOrCreateEmptyVectorStore };
| src/lib/contextManager.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "const memoryDirectory = path.join(projectRootDir, process.env.MEMORY_VECTOR_STORE_DIR || 'memory');\nlet memoryVectorStore: HNSWLib;\ntry {\n memoryVectorStore = await HNSWLib.load(memoryDirectory, new OpenAIEmbeddings());\n} catch {\n output.write(`${chalk.blue(`Creating a new memory vector store index in the ${memoryDirectory} directory`)}\\n`);\n memoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });",
"score": 0.8376823663711548
},
{
"filename": "src/config/index.ts",
"retrieved_chunk": " return {\n text: 'Loading',\n stream: output,\n discardStdin: false,\n };\n}\nconst defaultConfig: Config = {\n currentVectorStoreDatabasePath: path.join(getProjectRoot(), process.env.VECTOR_STORE_DIR || 'db/default'),\n numContextDocumentsToRetrieve: 6,\n numMemoryDocumentsToRetrieve: 4,",
"score": 0.8211661577224731
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": " }\n return chalk.red(`All files in the memory directory have been deleted: ${error}`);\n }\n}\nasync function resetMemoryVectorStore(onReset: (newMemoryVectorStore: HNSWLib) => void) {\n const newMemoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });\n await deleteMemoryDirectory();",
"score": 0.808192253112793
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": " documents: Array<{ content: string; metadataType: string }>\n): Promise<void> {\n const formattedDocuments = documents.map(\n (doc) => new Document({ pageContent: doc.content, metadata: { type: doc.metadataType } })\n );\n await memoryWrapper.vectorStoreInstance.addDocuments(formattedDocuments);\n await saveMemoryVectorStore();\n}\nfunction resetBufferWindowMemory() {\n bufferWindowMemory.clear();",
"score": 0.7972633242607117
},
{
"filename": "src/index.ts",
"retrieved_chunk": " output.write(chalk.green('\\nStart chatting or type /help for a list of commands\\n'));\n const userInput = await rl.question('> ');\n let response;\n if (userInput.startsWith('/')) {\n const [command, ...args] = userInput.slice(1).split(' ');\n await commandHandler.execute(command, args, output);\n } else {\n const memoryVectorStore = await getMemoryVectorStore();\n const contextVectorStore = await getContextVectorStore();\n const question = sanitizeInput(userInput);",
"score": 0.7821045517921448
}
] | typescript | const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath))); |
import type { ReadonlyDeep } from 'type-fest';
import vscode from 'vscode';
import fs from 'fs-extra';
import { log } from './log';
import { TYPE_KIND } from './helpers/type-analyzer/constants';
/** Shape of the `ts-type-hidden` workspace configuration read by {@link Config}. */
interface ExtensionConfig {
  /** @default true */
  enabled: boolean;
  /** @default `{$ExtensionRootPath}/res/type-icon.png` */
  typeIconPath: string;
  /** @default [] */
  ignoreTypeKinds: TYPE_KIND[];
}
// Icon shipped with the extension; used whenever `typeIconPath` is unset or invalid.
const defaultTypeIconPath = `${__dirname}/../res/type-icon.png`;
export class Config {
private static _instance: Config;
/** instance */
static get i(): Config {
return (Config._instance ??= new Config());
}
get(): ReadonlyDeep<ExtensionConfig> {
return Object.freeze(this.config);
}
private sync() {
const config = vscode.workspace.getConfiguration('ts-type-hidden');
this.config = {
enabled: config.get('enabled', true),
typeIconPath: config.get('typeIconPath') || defaultTypeIconPath,
ignoreTypeKinds: config.get('ignoreTypeKinds', [])
} satisfies ExtensionConfig;
}
private config!: ExtensionConfig;
private watchCallbacks: Array<Function> = [];
private constructor() {
this.sync();
this.verify();
this.watch();
}
update() {
this.sync();
| log.appendLine(`Config updated:
${JSON.stringify(this.config, null, 2)} |
`);
}
registerWatchCallback(fn: Function) {
this.watchCallbacks.push(fn);
}
private verify() {
if (!fs.existsSync(this.config.typeIconPath)) {
vscode.window.showErrorMessage(
'[ts-type-hidden configuration]: \n`typeIconPath` is not a valid path'
);
this.config.typeIconPath = defaultTypeIconPath;
}
for (let i = this.config.ignoreTypeKinds.length - 1; i >= 0; i--) {
const typeKindToIgnore = this.config.ignoreTypeKinds[i];
const isInvalid = !Object.values(TYPE_KIND).includes(typeKindToIgnore);
if (isInvalid) {
this.config.ignoreTypeKinds.splice(i, 1);
vscode.window.showErrorMessage(
`[ts-type-hidden configuration]: \n\`ignoreTypeKinds.${typeKindToIgnore}\` is not a valid value`
);
}
}
}
private watch() {
vscode.workspace.onDidChangeConfiguration(() => {
this.update();
this.verify();
this.watchCallbacks.forEach(cb => cb());
});
}
}
| src/core/config.ts | xlboy-ts-type-hidden-a749a29 | [
{
"filename": "src/core/editor-context.ts",
"retrieved_chunk": " }\n public static init() {\n EditorContext._instance = new EditorContext();\n }\n private editors = new Map</* filePath */ string, EditorInfo>();\n private curFocusedTypes: AnalyzedType[] = [];\n private constructor() {\n this.register();\n this.initVisibleEditors();\n this.decoration.init();",
"score": 0.7175135612487793
},
{
"filename": "src/core/global-state.ts",
"retrieved_chunk": "import vscode from 'vscode';\nexport class GlobalState {\n private static _instance: GlobalState;\n /** instance */\n public static get i(): GlobalState {\n if (!GlobalState._instance) {\n throw new Error('GlobalState not initialized');\n }\n return GlobalState._instance;\n }",
"score": 0.7109309434890747
},
{
"filename": "src/core/status-bar.ts",
"retrieved_chunk": "import vscode from 'vscode';\nexport class StatusBar {\n private static _instance: StatusBar;\n /** instance */\n public static get i(): StatusBar {\n if (!StatusBar._instance) {\n throw new Error('StatusBar not initialized');\n }\n return StatusBar._instance;\n }",
"score": 0.7081672549247742
},
{
"filename": "src/core/editor-context.ts",
"retrieved_chunk": " foldedTypeRanges: FoldingRange[];\n}\nexport class EditorContext {\n private static _instance: EditorContext;\n /** instance */\n public static get i(): EditorContext {\n if (!EditorContext._instance) {\n throw new Error('EditorContext not initialized');\n }\n return EditorContext._instance;",
"score": 0.707461953163147
},
{
"filename": "src/core/helpers/type-analyzer/index.test.ts",
"retrieved_chunk": " });\n it('constructor', () => {\n const analyzer = new TypeAnalyzer(`\nclass A {\n constructor(a: number) {}\n}\n `);\n analyzer.analyze();\n expect(analyzer.analyzedTypes).toMatchObject([\n {",
"score": 0.7062913179397583
}
] | typescript | log.appendLine(`Config updated:
${JSON.stringify(this.config, null, 2)} |
import chalk from 'chalk';
import { stdout as output } from 'node:process';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { PDFLoader } from 'langchain/document_loaders/fs/pdf';
import { DocxLoader } from 'langchain/document_loaders/fs/docx';
import { EPubLoader } from 'langchain/document_loaders/fs/epub';
import { CSVLoader } from 'langchain/document_loaders/fs/csv';
import ora from 'ora';
import { MarkdownTextSplitter, RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { Document } from 'langchain/document';
import path from 'path';
import { YoutubeTranscript } from 'youtube-transcript';
import getDirectoryListWithDetails from '../utils/getDirectoryListWithDetails.js';
import createDirectory from '../utils/createDirectory.js';
import { getConfig, getDefaultOraOptions, getProjectRoot, setCurrentVectorStoreDatabasePath } from '../config/index.js';
import getDirectoryFiles from '../utils/getDirectoryFiles.js';
import WebCrawler from './crawler.js';
// Resolved once at module load: project root anchors the docs/db directories,
// and the shared ora options write status output to stdout.
const projectRootDir = getProjectRoot();
const defaultOraOptions = getDefaultOraOptions(output);
/**
* This function loads and splits a file based on its extension using different loaders and text
* splitters.
* @param {string} filePath - A string representing the path to the file that needs to be loaded and
* split into documents.
* @returns The function `loadAndSplitFile` returns a Promise that resolves to an array of `Document`
* objects, where each `Document` represents a split portion of the input file. The type of the
* `Document` object is `Document<Record<string, unknown>>`, which means it has a generic type
* parameter that is an object with string keys and unknown values.
*/
async function loadAndSplitFile(filePath: string): Promise<Document<Record<string, unknown>>[]> {
const fileExtension = path.extname(filePath);
let loader;
let documents: Document<Record<string, unknown>>[];
switch (fileExtension) {
case '.json':
loader = new JSONLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.txt':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.md':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new MarkdownTextSplitter());
break;
case '.pdf':
loader = new PDFLoader(filePath, { splitPages: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.docx':
loader = new DocxLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.csv':
loader = new CSVLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.epub':
loader = new EPubLoader(filePath, { splitChapters: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
default:
throw new Error(`Unsupported file extension: ${fileExtension}`);
}
return documents;
}
/**
* This function loads or creates a vector store using HNSWLib and OpenAIEmbeddings.
* @returns The function `loadOrCreateVectorStore` returns a Promise that resolves to an instance of
* the `HNSWLib` class, which is a vector store used for storing and searching high-dimensional
* vectors.
*/
async function loadOrCreateVectorStore(): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
await createDirectory( | getConfig().currentVectorStoreDatabasePath); |
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),
}).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const filesToAdd = await getDirectoryFiles(docsDirectory);
const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath)));
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
vectorStore = await HNSWLib.fromDocuments(flattenedDocuments, new OpenAIEmbeddings({ maxConcurrency: 5 }));
await vectorStore.save(dbDirectory);
spinner.succeed();
}
return vectorStore;
}
// Initialise the Context Vector Store once at module load (top-level await).
const contextVectorStore = await loadOrCreateVectorStore();
// Mutable wrapper so the active store instance can be swapped out
// (see loadOrCreateEmptyVectorStore) while callers keep one reference.
const contextWrapper = {
  contextInstance: contextVectorStore,
};
/**
 * Switches to the Context Vector Store under the given sub-directory of the
 * vector store base directory, loading it when it exists or creating a new
 * empty, unsaved store otherwise. Also updates the current vector store
 * database path in the config and swaps the active instance in contextWrapper.
 * @param {string} subDirectory - Directory name of the store below the base directory.
 * @returns A Promise resolving to the loaded or newly created HNSWLib instance.
 */
async function loadOrCreateEmptyVectorStore(subDirectory: string): Promise<HNSWLib> {
  let vectorStore: HNSWLib;
  let spinner;
  const newContextVectorStorePath = path.join(projectRootDir, process.env.VECTOR_STORE_BASE_DIR || 'db', subDirectory);
  await createDirectory(newContextVectorStorePath);
  setCurrentVectorStoreDatabasePath(newContextVectorStorePath);
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
    output.write(chalk.blue(`Using Context Vector Store in the ${dbDirectory} directory\n`));
  } catch {
    spinner = ora({
      ...defaultOraOptions,
      text: chalk.blue(`Creating new empty Context Vector Store in the ${dbDirectory} directory`),
    }).start();
    vectorStore = new HNSWLib(new OpenAIEmbeddings({ maxConcurrency: 5 }), {
      space: 'cosine',
      numDimensions: 1536,
    });
    spinner.succeed();
    output.write(
      chalk.red.bold(
        // Fixed user-facing typo: "add context to is using" -> "add context to it using".
        `\nThe Context Vector Store is currently empty and unsaved, add context to it using \`/add-docs\`, \`/add-url\` or \`/add-youtube\``
      )
    );
  }
  contextWrapper.contextInstance = vectorStore;
  return vectorStore;
}
/** Returns the currently active Context Vector Store instance. */
async function getContextVectorStore() {
  return contextWrapper.contextInstance;
}
/**
 * Adds documents to the Context Vector Store and persists the updated store.
 * File paths are resolved relative to the docs directory (`DOCS_DIR`,
 * defaulting to `docs`) under the project root.
 * Errors are reported on the spinner (or stdout) and swallowed rather than
 * rethrown, so callers always receive `undefined`.
 * @param {string[]} filePaths - Paths (relative to the docs directory) of the files to add.
 * @returns nothing (`undefined`).
 */
async function addDocument(filePaths: string[]) {
  let spinner;
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    spinner = ora({ ...defaultOraOptions, text: `Adding files to the Context Vector Store` }).start();
    const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
    // Load and split every file in parallel; each entry is an array of chunks.
    const documents = await Promise.all(
      filePaths.map((filePath) => loadAndSplitFile(path.join(docsDirectory, filePath)))
    );
    // .flat() instead of reduce/concat, for consistency with addURL.
    const flattenedDocuments = documents.flat();
    const vectorStore = await getContextVectorStore();
    await vectorStore.addDocuments(flattenedDocuments);
    await vectorStore.save(dbDirectory);
    spinner.succeed();
  } catch (error) {
    // Prefer failing the spinner when it exists so the error replaces the status line.
    if (spinner) {
      spinner.fail(chalk.red(error));
    } else {
      output.write(chalk.red(error));
    }
  }
}
/**
 * Fetches the transcript of a YouTube video, splits it into documents and
 * stores them in the Context Vector Store, persisting the store afterwards.
 * Failures are reported on the spinner (or stdout) and are not rethrown.
 * @param {string} URLOrVideoID - A YouTube video URL or bare video ID.
 * @returns nothing (`undefined`).
 */
async function addYouTube(URLOrVideoID: string) {
  let progress;
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    progress = ora({
      ...defaultOraOptions,
      text: `Adding Video transcript from ${URLOrVideoID} to the Context Vector Store`,
    }).start();
    const transcriptParts = await YoutubeTranscript.fetchTranscript(URLOrVideoID);
    const fullText = transcriptParts.map((part) => part.text).join(' ');
    const transcriptDocs = await new RecursiveCharacterTextSplitter().splitDocuments([
      new Document({ pageContent: fullText }),
    ]);
    const store = await getContextVectorStore();
    await store.addDocuments(transcriptDocs);
    await store.save(dbDirectory);
    progress.succeed();
    return;
  } catch (error) {
    if (progress) {
      progress.fail(chalk.red(error));
    } else {
      output.write(chalk.red(error));
    }
  }
}
/**
 * Crawls a website starting at the given URL, splits the extracted page text
 * into documents, generates embeddings for them and saves them to the
 * Context Vector Store.
 * @param {string} URL - The URL to start crawling from.
 * @param {string} selector - CSS selector identifying the elements whose text is extracted.
 * @param {number} maxPages - Maximum number of pages to crawl.
 * @param {number} numberOfCharactersRequired - Minimum number of characters a
 * page must contain to be kept; shorter pages are discarded.
 * @returns nothing (`undefined`); errors are reported on the spinners and swallowed.
 */
async function addURL(URL: string, selector: string, maxPages: number, numberOfCharactersRequired: number) {
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  const addUrlSpinner = ora({ ...defaultOraOptions, text: `Crawling ${URL}` });
  let documents;
  try {
    addUrlSpinner.start();
    const progressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => {
      addUrlSpinner.text = `Links found: ${linksFound} - Links crawled: ${linksCrawled} - Crawling ${currentUrl}`;
    };
    const crawler = new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired);
    // NOTE(review): the previous `as Page[]` cast referenced a `Page` type that is
    // not imported in this file; a structural type covering the only field used
    // (`text`) keeps runtime behavior identical. Confirm against WebCrawler's
    // actual return type.
    const pages = (await crawler.start()) as Array<{ text: string }>;
    // The splitter carries only configuration, so one instance is reused for
    // every page instead of constructing it per iteration — TODO confirm it is
    // stateless across splitDocuments calls.
    const splitter = new RecursiveCharacterTextSplitter();
    documents = await Promise.all(
      pages.map((row) =>
        splitter.splitDocuments([
          new Document({
            pageContent: row.text,
          }),
        ])
      )
    );
    addUrlSpinner.succeed();
  } catch (error) {
    addUrlSpinner.fail(chalk.red(error));
  }
  if (documents) {
    const generateEmbeddingsSpinner = ora({ ...defaultOraOptions, text: `Generating Embeddings` });
    try {
      const flattenedDocuments = documents.flat();
      generateEmbeddingsSpinner.text = `Generating Embeddings for ${flattenedDocuments.length} documents`;
      generateEmbeddingsSpinner.start();
      const vectorStore = await getContextVectorStore();
      await vectorStore.addDocuments(flattenedDocuments);
      await vectorStore.save(dbDirectory);
      generateEmbeddingsSpinner.succeed();
      return;
    } catch (error) {
      generateEmbeddingsSpinner.fail(chalk.red(error));
    }
  }
}
async function listContextStores() {
const projectRoot = getProjectRoot(); // Please replace this with your actual function to get the project root
const vectorStoreDir = process.env.VECTOR_STORE_BASE_DIR || 'db';
const targetDir = path.join(projectRoot, vectorStoreDir);
const contextVectorStoresList = await getDirectoryListWithDetails(targetDir);
output.write(chalk.blue(`Context Vector Stores in ${targetDir}:\n\n`));
Object.entries(contextVectorStoresList).forEach(([dir, files]) => {
output.write(chalk.yellow(`Directory: ${dir}`));
if (dir === getConfig().currentVectorStoreDatabasePath) {
output.write(chalk.green(` (Currently selected)`));
}
output.write('\n');
files.forEach((file) => {
output.write(chalk.yellow(` File: ${file.name}, Size: ${file.size} KB\n`));
});
});
}
export { getContextVectorStore, addDocument, addURL, addYouTube, listContextStores, loadOrCreateEmptyVectorStore };
| src/lib/contextManager.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/lib/vectorStoreUtils.ts",
"retrieved_chunk": "import { HNSWLib } from 'langchain/vectorstores/hnswlib';\n/**\n * Retrieves relevant context for the given question by performing a similarity search on the provided vector store.\n * @param {HNSWLib} vectorStore - HNSWLib is a library for approximate nearest neighbor search, used to\n * search for similar vectors in a high-dimensional space.\n * @param {string} sanitizedQuestion - The sanitized version of the question that needs to be answered.\n * It is a string input.\n * @param {number} numDocuments - The `numDocuments` parameter is the number of documents that the\n * `getRelevantContext` function should retrieve from the `vectorStore` based on their similarity to\n * the `sanitizedQuestion`.",
"score": 0.8520009517669678
},
{
"filename": "src/lib/vectorStoreUtils.ts",
"retrieved_chunk": " * @returns The function `getRelevantContext` is returning a Promise that resolves to a string. The\n * string is the concatenation of the `pageContent` property of the top `numDocuments` documents\n * returned by a similarity search performed on a `vectorStore` using the `sanitizedQuestion` as the\n * query. The resulting string is trimmed and all newline characters are replaced with spaces.\n */\nasync function getRelevantContext(\n vectorStore: HNSWLib,\n sanitizedQuestion: string,\n numDocuments: number\n): Promise<string> {",
"score": 0.8315553069114685
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": " }\n return chalk.red(`All files in the memory directory have been deleted: ${error}`);\n }\n}\nasync function resetMemoryVectorStore(onReset: (newMemoryVectorStore: HNSWLib) => void) {\n const newMemoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });\n await deleteMemoryDirectory();",
"score": 0.7944250106811523
},
{
"filename": "src/commands/command.ts",
"retrieved_chunk": "/**\n * The function creates a command object with a name, aliases, description, and an execute function\n * that returns a Promise.\n * @param {string} name - A string representing the name of the command.\n * @param {string[]} aliases - An array of alternative names that can be used to call the command. For\n * example, if the command is named \"help\", aliases could include \"h\" or \"info\".\n * @param {string} description - A brief description of what the command does.\n * @param execute - The `execute` parameter is a function that takes in three arguments:\n * @returns A `Command` object is being returned.\n */",
"score": 0.7732008695602417
},
{
"filename": "src/utils/resolveURL.ts",
"retrieved_chunk": "/**\n * The function resolves a URL from a given base URL and returns the resolved URL as a string.\n * @param {string} from - The `from` parameter is a string representing the base URL that the `to`\n * parameter will be resolved against. It can be an absolute or relative URL.\n * @param {string} to - The `to` parameter is a string representing the URL that needs to be resolved.\n * It can be an absolute URL or a relative URL.\n * @returns The function `resolve` returns a string that represents the resolved URL. If the `to`\n * parameter is a relative URL, the function returns a string that represents the resolved URL relative\n * to the `from` parameter. If the `to` parameter is an absolute URL, the function returns a string\n * that represents the resolved URL.",
"score": 0.7532247304916382
}
] | typescript | getConfig().currentVectorStoreDatabasePath); |
import chalk from 'chalk';
import { stdout as output } from 'node:process';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { PDFLoader } from 'langchain/document_loaders/fs/pdf';
import { DocxLoader } from 'langchain/document_loaders/fs/docx';
import { EPubLoader } from 'langchain/document_loaders/fs/epub';
import { CSVLoader } from 'langchain/document_loaders/fs/csv';
import ora from 'ora';
import { MarkdownTextSplitter, RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { Document } from 'langchain/document';
import path from 'path';
import { YoutubeTranscript } from 'youtube-transcript';
import getDirectoryListWithDetails from '../utils/getDirectoryListWithDetails.js';
import createDirectory from '../utils/createDirectory.js';
import { getConfig, getDefaultOraOptions, getProjectRoot, setCurrentVectorStoreDatabasePath } from '../config/index.js';
import getDirectoryFiles from '../utils/getDirectoryFiles.js';
import WebCrawler from './crawler.js';
// Resolved once at module load: project root anchors the docs/db directories,
// and the shared ora options write status output to stdout.
const projectRootDir = getProjectRoot();
const defaultOraOptions = getDefaultOraOptions(output);
/**
* This function loads and splits a file based on its extension using different loaders and text
* splitters.
* @param {string} filePath - A string representing the path to the file that needs to be loaded and
* split into documents.
* @returns The function `loadAndSplitFile` returns a Promise that resolves to an array of `Document`
* objects, where each `Document` represents a split portion of the input file. The type of the
* `Document` object is `Document<Record<string, unknown>>`, which means it has a generic type
* parameter that is an object with string keys and unknown values.
*/
async function loadAndSplitFile(filePath: string): Promise<Document<Record<string, unknown>>[]> {
const fileExtension = path.extname(filePath);
let loader;
let documents: Document<Record<string, unknown>>[];
switch (fileExtension) {
case '.json':
loader = new JSONLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.txt':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.md':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new MarkdownTextSplitter());
break;
case '.pdf':
loader = new PDFLoader(filePath, { splitPages: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.docx':
loader = new DocxLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.csv':
loader = new CSVLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.epub':
loader = new EPubLoader(filePath, { splitChapters: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
default:
throw new Error(`Unsupported file extension: ${fileExtension}`);
}
return documents;
}
/**
* This function loads or creates a vector store using HNSWLib and OpenAIEmbeddings.
* @returns The function `loadOrCreateVectorStore` returns a Promise that resolves to an instance of
* the `HNSWLib` class, which is a vector store used for storing and searching high-dimensional
* vectors.
*/
async function loadOrCreateVectorStore(): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
await createDirectory(getConfig().currentVectorStoreDatabasePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),
}).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const filesToAdd = await getDirectoryFiles(docsDirectory);
const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath)));
| const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []); |
vectorStore = await HNSWLib.fromDocuments(flattenedDocuments, new OpenAIEmbeddings({ maxConcurrency: 5 }));
await vectorStore.save(dbDirectory);
spinner.succeed();
}
return vectorStore;
}
// Initialise the Context Vector Store once at module load (top-level await).
const contextVectorStore = await loadOrCreateVectorStore();
// Mutable wrapper so the active store instance can be swapped out
// (see loadOrCreateEmptyVectorStore) while callers keep one reference.
const contextWrapper = {
  contextInstance: contextVectorStore,
};
/**
 * Switches to the Context Vector Store under the given sub-directory of the
 * vector store base directory, loading it when it exists or creating a new
 * empty, unsaved store otherwise. Also updates the current vector store
 * database path in the config and swaps the active instance in contextWrapper.
 * @param {string} subDirectory - Directory name of the store below the base directory.
 * @returns A Promise resolving to the loaded or newly created HNSWLib instance.
 */
async function loadOrCreateEmptyVectorStore(subDirectory: string): Promise<HNSWLib> {
  let vectorStore: HNSWLib;
  let spinner;
  const newContextVectorStorePath = path.join(projectRootDir, process.env.VECTOR_STORE_BASE_DIR || 'db', subDirectory);
  await createDirectory(newContextVectorStorePath);
  setCurrentVectorStoreDatabasePath(newContextVectorStorePath);
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
    output.write(chalk.blue(`Using Context Vector Store in the ${dbDirectory} directory\n`));
  } catch {
    spinner = ora({
      ...defaultOraOptions,
      text: chalk.blue(`Creating new empty Context Vector Store in the ${dbDirectory} directory`),
    }).start();
    vectorStore = new HNSWLib(new OpenAIEmbeddings({ maxConcurrency: 5 }), {
      space: 'cosine',
      numDimensions: 1536,
    });
    spinner.succeed();
    output.write(
      chalk.red.bold(
        // Fixed user-facing typo: "add context to is using" -> "add context to it using".
        `\nThe Context Vector Store is currently empty and unsaved, add context to it using \`/add-docs\`, \`/add-url\` or \`/add-youtube\``
      )
    );
  }
  contextWrapper.contextInstance = vectorStore;
  return vectorStore;
}
/** Returns the currently active Context Vector Store instance. */
async function getContextVectorStore() {
  const { contextInstance } = contextWrapper;
  return contextInstance;
}
/**
 * Adds documents to the Context Vector Store and persists the updated store.
 * @param {string[]} filePaths - File paths, relative to the docs directory, of the
 * documents to add to the Context Vector Store.
 * @returns nothing (`undefined`); errors are reported via the spinner/output instead of thrown.
 */
async function addDocument(filePaths: string[]) {
  let spinner;
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    spinner = ora({ ...defaultOraOptions, text: `Adding files to the Context Vector Store` }).start();
    const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
    // Load and split every file in parallel; each file yields an array of documents.
    const documents = await Promise.all(
      filePaths.map((filePath) => loadAndSplitFile(path.join(docsDirectory, filePath)))
    );
    // flat() replaces the reduce/concat idiom and matches addURL's flattening style.
    const flattenedDocuments = documents.flat();
    const vectorStore = await getContextVectorStore();
    await vectorStore.addDocuments(flattenedDocuments);
    await vectorStore.save(dbDirectory);
    spinner.succeed();
    return;
  } catch (error) {
    // The spinner only exists if ora started successfully; otherwise write directly.
    if (spinner) {
      spinner.fail(chalk.red(error));
    } else {
      output.write(chalk.red(error));
    }
  }
}
/**
 * Fetches the transcript of a YouTube video and adds it to the Context Vector Store.
 * @param {string} URLOrVideoID - Either the full URL or the bare video ID of a YouTube video.
 * @returns nothing (`undefined`); failures are reported via the spinner/output rather than thrown.
 */
async function addYouTube(URLOrVideoID: string) {
  let spinner;
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    spinner = ora({
      ...defaultOraOptions,
      text: `Adding Video transcript from ${URLOrVideoID} to the Context Vector Store`,
    }).start();
    const transcript = await YoutubeTranscript.fetchTranscript(URLOrVideoID);
    // Join the timestamped transcript fragments into a single text blob before splitting.
    const text = transcript.map((part) => part.text).join(' ');
    const splitter = new RecursiveCharacterTextSplitter();
    const videoDocs = await splitter.splitDocuments([
      new Document({
        pageContent: text,
      }),
    ]);
    const vectorStore = await getContextVectorStore();
    await vectorStore.addDocuments(videoDocs);
    await vectorStore.save(dbDirectory);
    spinner.succeed();
    return;
  } catch (error) {
    // The spinner only exists if ora started successfully; otherwise write directly.
    if (spinner) {
      spinner.fail(chalk.red(error));
    } else {
      output.write(chalk.red(error));
    }
  }
}
/**
 * Crawls a given URL, extracts text from the pages, splits the text into documents,
 * generates embeddings for the documents, and saves them to the Context Vector Store.
 * @param {string} URL - The URL of the website to crawl and extract text from.
 * @param {string} selector - A CSS selector identifying the HTML elements whose text is
 * extracted from each crawled page.
 * @param {number} maxPages - The maximum number of pages to crawl for the given URL.
 * @param {number} numberOfCharactersRequired - Minimum number of characters a page must
 * contain to be kept; shorter pages are discarded by the crawler.
 * @returns nothing (`undefined`); crawl or embedding failures are reported via spinners.
 */
async function addURL(URL: string, selector: string, maxPages: number, numberOfCharactersRequired: number) {
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  const addUrlSpinner = ora({ ...defaultOraOptions, text: `Crawling ${URL}` });
  let documents;
  try {
    addUrlSpinner.start();
    // Progress updates from the crawler are surfaced through the spinner text.
    const progressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => {
      addUrlSpinner.text = `Links found: ${linksFound} - Links crawled: ${linksCrawled} - Crawling ${currentUrl}`;
    };
    const crawler = new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired);
    const pages = (await crawler.start()) as Page[];
    // Split each crawled page into documents; Promise.all resolves the per-page promises.
    documents = await Promise.all(
      pages.map((row) => {
        const splitter = new RecursiveCharacterTextSplitter();
        const webDocs = splitter.splitDocuments([
          new Document({
            pageContent: row.text,
          }),
        ]);
        return webDocs;
      })
    );
    addUrlSpinner.succeed();
  } catch (error) {
    addUrlSpinner.fail(chalk.red(error));
  }
  // Only attempt embedding when the crawl phase produced documents.
  if (documents) {
    const generateEmbeddingsSpinner = ora({ ...defaultOraOptions, text: `Generating Embeddings` });
    try {
      const flattenedDocuments = documents.flat();
      generateEmbeddingsSpinner.text = `Generating Embeddings for ${flattenedDocuments.length} documents`;
      generateEmbeddingsSpinner.start();
      const vectorStore = await getContextVectorStore();
      await vectorStore.addDocuments(flattenedDocuments);
      await vectorStore.save(dbDirectory);
      generateEmbeddingsSpinner.succeed();
      return;
    } catch (error) {
      generateEmbeddingsSpinner.fail(chalk.red(error));
    }
  }
}
async function listContextStores() {
const projectRoot = getProjectRoot(); // Please replace this with your actual function to get the project root
const vectorStoreDir = process.env.VECTOR_STORE_BASE_DIR || 'db';
const targetDir = path.join(projectRoot, vectorStoreDir);
const contextVectorStoresList = await getDirectoryListWithDetails(targetDir);
output.write(chalk.blue(`Context Vector Stores in ${targetDir}:\n\n`));
Object.entries(contextVectorStoresList).forEach(([dir, files]) => {
output.write(chalk.yellow(`Directory: ${dir}`));
if (dir === getConfig().currentVectorStoreDatabasePath) {
output.write(chalk.green(` (Currently selected)`));
}
output.write('\n');
files.forEach((file) => {
output.write(chalk.yellow(` File: ${file.name}, Size: ${file.size} KB\n`));
});
});
}
export { getContextVectorStore, addDocument, addURL, addYouTube, listContextStores, loadOrCreateEmptyVectorStore };
| src/lib/contextManager.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": " documents: Array<{ content: string; metadataType: string }>\n): Promise<void> {\n const formattedDocuments = documents.map(\n (doc) => new Document({ pageContent: doc.content, metadata: { type: doc.metadataType } })\n );\n await memoryWrapper.vectorStoreInstance.addDocuments(formattedDocuments);\n await saveMemoryVectorStore();\n}\nfunction resetBufferWindowMemory() {\n bufferWindowMemory.clear();",
"score": 0.8083463311195374
},
{
"filename": "src/utils/getDirectoryFiles.ts",
"retrieved_chunk": "import path from 'path';\nimport fs from 'node:fs/promises';\nexport default async function getDirectoryFiles(directoryPath: string): Promise<string[]> {\n const fileNames = await fs.readdir(directoryPath);\n const filePathsPromises = fileNames.map(async (fileName) => {\n const filePath = path.join(directoryPath, fileName);\n const stat = await fs.stat(filePath);\n if (stat.isDirectory()) {\n const subDirectoryFiles = await getDirectoryFiles(filePath);\n return subDirectoryFiles;",
"score": 0.8042681217193604
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "}\nasync function deleteMemoryDirectory() {\n try {\n const files = await fs.readdir(memoryDirectory);\n const deletePromises = files.map((file) => fs.unlink(path.join(memoryDirectory, file)));\n await Promise.all(deletePromises);\n return `All files in the memory directory have been deleted.`;\n } catch (error) {\n if (error instanceof Error) {\n return chalk.red(`All files in the memory directory have been deleted: ${error.message}`);",
"score": 0.8036755323410034
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "const memoryDirectory = path.join(projectRootDir, process.env.MEMORY_VECTOR_STORE_DIR || 'memory');\nlet memoryVectorStore: HNSWLib;\ntry {\n memoryVectorStore = await HNSWLib.load(memoryDirectory, new OpenAIEmbeddings());\n} catch {\n output.write(`${chalk.blue(`Creating a new memory vector store index in the ${memoryDirectory} directory`)}\\n`);\n memoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });",
"score": 0.7870887517929077
},
{
"filename": "src/config/index.ts",
"retrieved_chunk": " return {\n text: 'Loading',\n stream: output,\n discardStdin: false,\n };\n}\nconst defaultConfig: Config = {\n currentVectorStoreDatabasePath: path.join(getProjectRoot(), process.env.VECTOR_STORE_DIR || 'db/default'),\n numContextDocumentsToRetrieve: 6,\n numMemoryDocumentsToRetrieve: 4,",
"score": 0.7806493043899536
}
] | typescript | const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []); |
import * as cheerio from 'cheerio';
import Crawler, { CrawlerRequestResponse } from 'crawler';
import { stderr } from 'node:process';
import resolveURL from '../utils/resolveURL.js';
// import TurndownService from 'turndown';
// const turndownService = new TurndownService();
type ProgressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => void;
/** A crawled page: its resolved URL, the extracted text, and the document title. */
interface Page {
  url: string;
  text: string;
  title: string;
}
/* The WebCrawler class is a TypeScript implementation of a web crawler that can extract text from web
pages and follow links to crawl more pages. */
class WebCrawler {
  pages: Page[]; // pages collected so far (only those passing textLengthMinimum)
  limit: number; // maximum number of links to queue for crawling
  urls: string[]; // seed URLs; followed links must contain one of these to be queued
  count: number; // number of links queued so far (excludes the seed URLs)
  textLengthMinimum: number; // pages with extracted text shorter than this are discarded
  selector: string; // CSS selector whose text content is extracted from each page
  progressCallback: ProgressCallback;
  crawler: Crawler;
  constructor(
    urls: string[],
    progressCallback: ProgressCallback,
    selector = 'body',
    limit = 20,
    textLengthMinimum = 200
  ) {
    this.urls = urls;
    this.selector = selector;
    this.limit = limit;
    this.textLengthMinimum = textLengthMinimum;
    this.progressCallback = progressCallback;
    this.count = 0;
    this.pages = [];
    this.crawler = new Crawler({
      maxConnections: 10,
      callback: this.handleRequest,
      userAgent: 'node-crawler',
    });
  }
  /* `handleRequest` is a method that handles the response of a web page request made by the `crawler`
  object. It takes in three parameters: `error`, `res`, and `done`. It strips boilerplate
  elements, records the page text when long enough, and queues in-scope links for crawling. */
  handleRequest = (error: Error | null, res: CrawlerRequestResponse, done: () => void) => {
    if (error) {
      stderr.write(error.message);
      done();
      return;
    }
    const $ = cheerio.load(res.body);
    // Remove obviously superfluous elements
    $('script').remove();
    $('header').remove();
    $('nav').remove();
    $('style').remove();
    $('img').remove();
    $('svg').remove();
    const title = $('title').text() || '';
    const text = $(this.selector).text();
    // const text = turndownService.turndown(html || '');
    const page: Page = {
      url: res.request.uri.href,
      text,
      title,
    };
    if (text.length > this.textLengthMinimum) {
      this.pages.push(page);
      // NOTE(review): passes count+1 as linksFound and pages.length as linksCrawled —
      // verify this argument order matches the ProgressCallback contract.
      this.progressCallback(this.count + 1, this.pages.length, res.request.uri.href);
    }
    $('a').each((_i: number, elem: cheerio.Element) => {
      if (this.count >= this.limit) {
        return false; // Stop iterating once the limit is reached
      }
      // Drop any fragment (#...) so the same page is not queued under multiple anchors.
      const href = $(elem).attr('href')?.split('#')[0];
      const uri = res.request.uri.href;
      const url = href && resolveURL(uri, href);
      // crawl more — only links that stay within one of the seed URLs are followed
      if (url && this.urls.some((u) => url.includes(u))) {
        this.crawler.queue(url);
        this.count += 1;
      }
      return true; // Continue iterating when the limit is not reached
    });
    done();
  };
  // Starts the crawl from the seed URLs; resolves with the collected pages once the
  // crawler's queue drains (the 'drain' event fires when no requests remain).
  start = async () => {
    this.pages = [];
    return new Promise((resolve) => {
      this.crawler.on('drain', () => {
        resolve(this.pages);
      });
      this.urls.forEach((url) => {
        this.crawler.queue(url);
      });
    });
  };
}
export default WebCrawler;
| src/lib/crawler.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/utils/resolveURL.ts",
"retrieved_chunk": " */\nexport default function resolve(from: string, to: string) {\n const resolvedUrl = new URL(to, new URL(from, 'resolve://'));\n if (resolvedUrl.protocol === 'resolve:') {\n // `from` is a relative URL.\n const { pathname, search, hash } = resolvedUrl;\n return pathname + search + hash;\n }\n return resolvedUrl.toString();\n}",
"score": 0.7156533598899841
},
{
"filename": "src/commands/addURLCommand.ts",
"retrieved_chunk": " To avoid this, you can try to target a specific selector such as \\`.main\\``,\n async (args, output) => {\n if (!args || args.length > 4) {\n output.write(\n chalk.red(\n 'Invalid number of arguments. Usage: /add-url `url` `selector to extract` `Maximum number of links to follow` `Ignore pages with less than n characters`\\n'\n )\n );\n return;\n }",
"score": 0.6986737847328186
},
{
"filename": "src/commands/addURLCommand.ts",
"retrieved_chunk": " const url = args[0];\n const selector = args[1];\n const maxLinks = parseInt(args[2], 10) || 20;\n const minChars = parseInt(args[3], 10) || 200;\n await addURL(url, selector, maxLinks, minChars);\n }\n);\nexport default addURLCommand;",
"score": 0.6820101737976074
},
{
"filename": "src/utils/getDirectoryListWithDetails.ts",
"retrieved_chunk": " const res = path.resolve(directory, dirent.name);\n if (dirent.isDirectory()) {\n const subdirContents = await getDirectoryListWithDetails(res, newContents);\n Object.assign(newContents, subdirContents);\n } else if (dirent.isFile() && dirent.name !== '.gitignore') {\n const stats = await fs.stat(res);\n files.push({ name: dirent.name, size: Math.ceil(stats.size / 1024) });\n }\n });\n await Promise.all(actions);",
"score": 0.6775293946266174
},
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": "async function addURL(URL: string, selector: string, maxPages: number, numberOfCharactersRequired: number) {\n const dbDirectory = getConfig().currentVectorStoreDatabasePath;\n const addUrlSpinner = ora({ ...defaultOraOptions, text: `Crawling ${URL}` });\n let documents;\n try {\n addUrlSpinner.start();\n const progressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => {\n addUrlSpinner.text = `Links found: ${linksFound} - Links crawled: ${linksCrawled} - Crawling ${currentUrl}`;\n };\n const crawler = new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired);",
"score": 0.6731944680213928
}
] | typescript | url = href && resolveURL(uri, href); |
/* eslint-disable no-await-in-loop */
import dotenv from 'dotenv';
import { OpenAIChat } from 'langchain/llms/openai';
// eslint-disable-next-line import/no-unresolved
import * as readline from 'node:readline/promises';
import path from 'path';
import fs from 'fs';
/* This line of code is importing the `stdin` and `stdout` streams from the `process` module in
Node.js. These streams are used for reading input from the user and writing output to the console,
respectively. */
import { stdin as input, stdout as output } from 'node:process';
import { CallbackManager } from 'langchain/callbacks';
import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts';
import { LLMChain } from 'langchain/chains';
import { oneLine } from 'common-tags';
import chalk from 'chalk';
import logChat from './chatLogger.js';
import createCommandHandler from './commands.js';
import { getMemoryVectorStore, addDocumentsToMemoryVectorStore, getBufferWindowMemory } from './lib/memoryManager.js';
import { getContextVectorStore } from './lib/contextManager.js';
import { getRelevantContext } from './lib/vectorStoreUtils.js';
import sanitizeInput from './utils/sanitizeInput.js';
import { getConfig, getProjectRoot } from './config/index.js';
const projectRootDir = getProjectRoot();
dotenv.config();
// Set up the chat log directory
const chatLogDirectory = path.join(projectRootDir, 'chat_logs');
// Get the prompt template
const systemPromptTemplate = fs.readFileSync(path.join(projectRootDir, 'src/prompt.txt'), 'utf8');
// Set up the readline interface to read input from the user and write output to the console
const rl = readline.createInterface({ input, output });
// Set up CLI commands
const commandHandler: CommandHandler = createCommandHandler();
// Stream LLM tokens straight to the console as they are generated.
const callbackManager = CallbackManager.fromHandlers({
  // This function is called when the LLM generates a new token (i.e., a prediction for the next word)
  async handleLLMNewToken(token: string) {
    // Write the token to the output stream (i.e., the console)
    output.write(token);
  },
});
const llm = new OpenAIChat({
  streaming: true,
  callbackManager,
  modelName: process.env.MODEL || 'gpt-3.5-turbo',
});
// The system prompt comes from src/prompt.txt, collapsed to one line by oneLine.
const systemPrompt = SystemMessagePromptTemplate.fromTemplate(oneLine`
  ${systemPromptTemplate}
`);
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
  systemPrompt,
  HumanMessagePromptTemplate.fromTemplate('QUESTION: """{input}"""'),
]);
const windowMemory = getBufferWindowMemory();
const chain = new LLMChain({
  prompt: chatPrompt,
  memory: windowMemory,
  llm,
});
// eslint-disable-next-line no-constant-condition
while (true) {
output.write(chalk.green('\nStart chatting or type /help for a list of commands\n'));
const userInput = await rl.question('> ');
let response;
if (userInput.startsWith('/')) {
const [command, ...args] = userInput.slice(1).split(' ');
await commandHandler.execute(command, args, output);
} else {
const memoryVectorStore = await getMemoryVectorStore();
const contextVectorStore = await getContextVectorStore();
const question = sanitizeInput(userInput);
const config = getConfig();
| const context = await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve); |
const history = await getRelevantContext(memoryVectorStore, question, config.numMemoryDocumentsToRetrieve);
try {
response = await chain.call({
input: question,
context,
history,
immediate_history: config.useWindowMemory ? windowMemory : '',
});
if (response) {
await addDocumentsToMemoryVectorStore([
{ content: question, metadataType: 'question' },
{ content: response.text, metadataType: 'answer' },
]);
await logChat(chatLogDirectory, question, response.response);
}
} catch (error) {
if (error instanceof Error && error.message.includes('Cancel:')) {
// TODO: Handle cancel
} else if (error instanceof Error) {
output.write(chalk.red(error.message));
} else {
output.write(chalk.red(error));
}
}
}
output.write('\n');
}
| src/index.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": " try {\n spinner = ora({ ...defaultOraOptions, text: `Adding files to the Context Vector Store` }).start();\n const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');\n const documents = await Promise.all(\n filePaths.map((filePath) => loadAndSplitFile(path.join(docsDirectory, filePath)))\n );\n const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);\n const vectorStore = await getContextVectorStore();\n await vectorStore.addDocuments(flattenedDocuments);\n await vectorStore.save(dbDirectory);",
"score": 0.8140274286270142
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": " documents: Array<{ content: string; metadataType: string }>\n): Promise<void> {\n const formattedDocuments = documents.map(\n (doc) => new Document({ pageContent: doc.content, metadata: { type: doc.metadataType } })\n );\n await memoryWrapper.vectorStoreInstance.addDocuments(formattedDocuments);\n await saveMemoryVectorStore();\n}\nfunction resetBufferWindowMemory() {\n bufferWindowMemory.clear();",
"score": 0.7900940775871277
},
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": " const dbDirectory = getConfig().currentVectorStoreDatabasePath;\n try {\n vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));\n } catch {\n spinner = ora({\n ...defaultOraOptions,\n text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),\n }).start();\n const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');\n const filesToAdd = await getDirectoryFiles(docsDirectory);",
"score": 0.7842996120452881
},
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": " pageContent: text,\n }),\n ]);\n const vectorStore = await getContextVectorStore();\n await vectorStore.addDocuments(videoDocs);\n await vectorStore.save(dbDirectory);\n spinner.succeed();\n return;\n } catch (error) {\n if (spinner) {",
"score": 0.77907794713974
},
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": " const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath)));\n const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);\n vectorStore = await HNSWLib.fromDocuments(flattenedDocuments, new OpenAIEmbeddings({ maxConcurrency: 5 }));\n await vectorStore.save(dbDirectory);\n spinner.succeed();\n }\n return vectorStore;\n}\nconst contextVectorStore = await loadOrCreateVectorStore();\nconst contextWrapper = {",
"score": 0.7760862708091736
}
] | typescript | const context = await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve); |
import { COMMENT_PRE, commentPreReg } from '../constants'
import { parsePlatform } from './parsePlatform'
/**
 * Parses conditional-compilation comments (e.g. #ifdef / #ifndef / #endif) out of `code`
 * and returns position entries for each prefix keyword, `||` separator, and platform name.
 * Returns undefined when the input is blank or contains no matching comments.
 */
export function parseComment(code: string) {
  if (code.trim().length === 0)
    return
  const commentResults = [...code.matchAll(commentPreReg)]
  if (commentResults.length === 0)
    return
  const commentAST = []
  for (let i = 0; i < commentResults.length; i++) {
    const item = commentResults[i]
    // matchAll matches always carry an index; the non-null assertion is safe here.
    const index = item.index!
    const [self, commentPre, _space, prefix, _platform] = item
    if (!COMMENT_PRE.includes(commentPre))
      continue
    const platform = _platform.trim()
    if (platform && prefix !== '#endif') {
      // Record the prefix keyword (e.g. #ifdef) position relative to the full source.
      const prefixStart = self.indexOf(prefix) + index
      const prefixEnd = prefixStart + prefix.length
      commentAST.push({
        start: prefixStart,
        end: prefixEnd,
        type: 'prefix',
        row: prefix,
      })
      const platforms = parsePlatform(platform, commentPre)
      if (!platforms)
        continue
      if (platforms.length > 1) {
        // Multiple platforms: also record each `||` separator as a prefix-typed entry.
        const orRex = /\|\|/g
        const orResult = [...platform.matchAll(orRex)]
        // Offset of the trimmed platform expression within the full source (+1 —
        // presumably compensates for leading whitespace; TODO confirm against commentPreReg).
        const offset = index + self.indexOf(_platform) + 1
        orResult.forEach((element) => {
          const orStart = offset + element.index!
          const orEnd = orStart + 2
          commentAST.push({
            start: orStart,
            end: orEnd,
            type: 'prefix',
            row: element[0],
          })
        })
      }
      // Record each platform name's position within the full source.
      platforms.forEach((element) => {
        const platformStart = self.indexOf(element) + index
        const platformEnd = platformStart + element.length
        commentAST.push({
          start: platformStart,
          end: platformEnd,
          type: 'platform',
          row: element,
        })
      })
    }
    else {
      // #endif (or a prefix without platforms): only the keyword itself is recorded.
      const start = self.indexOf(prefix) + index
      const end = start + prefix.length
      commentAST.push({
        start,
        end,
        row: prefix,
        type: 'prefix',
      })
    }
  }
  return commentAST
}
| src/parseComment/index.ts | uni-helper-uni-highlight-vscode-f9002ae | [
{
"filename": "src/getPlatformInfo.ts",
"retrieved_chunk": " platformInfos.push({\n start,\n end,\n type,\n color,\n })\n }\n else if (type === 'platform' && !color) {\n platformInfos.push({\n start,",
"score": 0.8750213384628296
},
{
"filename": "src/getPlatformInfo.ts",
"retrieved_chunk": " const { start, end, type, row } = item\n const color = HIGHTLIGHT_COLOR.platform[row as Platform]\n if (type === 'prefix') {\n platformInfos.push({\n start,\n end,\n type,\n })\n }\n else if (type === 'platform' && color) {",
"score": 0.8495157361030579
},
{
"filename": "src/transformPlatform.ts",
"retrieved_chunk": " highlightRange.platform[color].push(range)\n }\n if (platformInfo.type === 'unPlatform') {\n highlightRange.unPlatform.push({\n range,\n row,\n })\n }\n })\n return highlightRange",
"score": 0.832924485206604
},
{
"filename": "src/transformPlatform.ts",
"retrieved_chunk": "}\nexport interface HighlightRange {\n prefix: Range[]\n platform: {\n [key: string]: Range[]\n }\n unPlatform: {\n range: Range\n row: string\n }[]",
"score": 0.7820762395858765
},
{
"filename": "src/CommentFoldingRangeProvider.ts",
"retrieved_chunk": " const lines = text.split('\\n')\n for (let i = 0; i < lines.length; i++) {\n const { row } = parseComment(lines[i])?.[0] ?? {}\n if (!row)\n continue\n if (row === '#ifdef' || row === '#ifndef') {\n startLines.push(i + 1)\n stack.push(startLines.length - 1)\n }\n else if (row === '#endif') {",
"score": 0.7552276849746704
}
] | typescript | (element) => { |
import chalk from 'chalk';
import { stdout as output } from 'node:process';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { PDFLoader } from 'langchain/document_loaders/fs/pdf';
import { DocxLoader } from 'langchain/document_loaders/fs/docx';
import { EPubLoader } from 'langchain/document_loaders/fs/epub';
import { CSVLoader } from 'langchain/document_loaders/fs/csv';
import ora from 'ora';
import { MarkdownTextSplitter, RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { Document } from 'langchain/document';
import path from 'path';
import { YoutubeTranscript } from 'youtube-transcript';
import getDirectoryListWithDetails from '../utils/getDirectoryListWithDetails.js';
import createDirectory from '../utils/createDirectory.js';
import { getConfig, getDefaultOraOptions, getProjectRoot, setCurrentVectorStoreDatabasePath } from '../config/index.js';
import getDirectoryFiles from '../utils/getDirectoryFiles.js';
import WebCrawler from './crawler.js';
const projectRootDir = getProjectRoot();
const defaultOraOptions = getDefaultOraOptions(output);
/**
 * Loads a file and splits it into documents, selecting the loader and text splitter
 * from the file's extension.
 * @param {string} filePath - Path to the file to load and split into documents.
 * @returns A Promise resolving to an array of `Document<Record<string, unknown>>`,
 * one per split portion of the input file.
 * @throws Error when the file extension is not one of the supported types.
 */
async function loadAndSplitFile(filePath: string): Promise<Document<Record<string, unknown>>[]> {
  const fileExtension = path.extname(filePath);
  let loader;
  let splitter;
  switch (fileExtension) {
    case '.json':
      loader = new JSONLoader(filePath);
      splitter = new RecursiveCharacterTextSplitter();
      break;
    case '.txt':
      loader = new TextLoader(filePath);
      splitter = new RecursiveCharacterTextSplitter();
      break;
    case '.md':
      // Markdown files use a structure-aware splitter.
      loader = new TextLoader(filePath);
      splitter = new MarkdownTextSplitter();
      break;
    case '.pdf':
      loader = new PDFLoader(filePath, { splitPages: false });
      splitter = new RecursiveCharacterTextSplitter();
      break;
    case '.docx':
      loader = new DocxLoader(filePath);
      splitter = new RecursiveCharacterTextSplitter();
      break;
    case '.csv':
      loader = new CSVLoader(filePath);
      splitter = new RecursiveCharacterTextSplitter();
      break;
    case '.epub':
      loader = new EPubLoader(filePath, { splitChapters: false });
      splitter = new RecursiveCharacterTextSplitter();
      break;
    default:
      throw new Error(`Unsupported file extension: ${fileExtension}`);
  }
  return loader.loadAndSplit(splitter);
}
/**
* This function loads or creates a vector store using HNSWLib and OpenAIEmbeddings.
* @returns The function `loadOrCreateVectorStore` returns a Promise that resolves to an instance of
* the `HNSWLib` class, which is a vector store used for storing and searching high-dimensional
* vectors.
*/
async function loadOrCreateVectorStore(): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
await createDirectory(getConfig().currentVectorStoreDatabasePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),
}).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const filesToAdd = await getDirectoryFiles(docsDirectory);
const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath)));
const flattenedDocuments = documents.reduce( | (acc, val) => acc.concat(val), []); |
vectorStore = await HNSWLib.fromDocuments(flattenedDocuments, new OpenAIEmbeddings({ maxConcurrency: 5 }));
await vectorStore.save(dbDirectory);
spinner.succeed();
}
return vectorStore;
}
const contextVectorStore = await loadOrCreateVectorStore();
// Mutable wrapper around the active store: loadOrCreateEmptyVectorStore reassigns
// contextInstance at runtime, so callers must always read it via getContextVectorStore().
const contextWrapper = {
  contextInstance: contextVectorStore,
};
/**
 * Loads an existing Context Vector Store from the given subdirectory, or creates a new
 * empty (unsaved) one there when none exists yet. Also updates the config's current
 * vector store path and the module-level contextWrapper.
 * @param {string} subDirectory - Subdirectory (under the vector store base directory)
 * where the store is persisted.
 * @returns a Promise that resolves to an instance of the HNSWLib class, either loaded
 * from disk or created as a new empty Context Vector Store.
 */
async function loadOrCreateEmptyVectorStore(subDirectory: string): Promise<HNSWLib> {
  let vectorStore: HNSWLib;
  let spinner;
  const newContextVectorStorePath = path.join(projectRootDir, process.env.VECTOR_STORE_BASE_DIR || 'db', subDirectory);
  await createDirectory(newContextVectorStorePath);
  setCurrentVectorStoreDatabasePath(newContextVectorStorePath);
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
    output.write(chalk.blue(`Using Context Vector Store in the ${dbDirectory} directory\n`));
  } catch {
    // HNSWLib.load throws when no saved index exists: build an empty store instead.
    spinner = ora({
      ...defaultOraOptions,
      text: chalk.blue(`Creating new empty Context Vector Store in the ${dbDirectory} directory`),
    }).start();
    vectorStore = new HNSWLib(new OpenAIEmbeddings({ maxConcurrency: 5 }), {
      space: 'cosine',
      numDimensions: 1536,
    });
    spinner.succeed();
    // Fix: message typo "add context to is using" -> "add context to it using".
    output.write(
      chalk.red.bold(
        `\nThe Context Vector Store is currently empty and unsaved, add context to it using \`/add-docs\`, \`/add-url\` or \`/add-youtube\``
      )
    );
  }
  contextWrapper.contextInstance = vectorStore;
  return vectorStore;
}
/** Returns the currently active Context Vector Store instance. */
async function getContextVectorStore() {
  const { contextInstance } = contextWrapper;
  return contextInstance;
}
/**
 * Adds documents to the Context Vector Store and persists the updated store.
 * @param {string[]} filePaths - File paths, relative to the docs directory, of the
 * documents to add to the Context Vector Store.
 * @returns nothing (`undefined`); errors are reported via the spinner/output instead of thrown.
 */
async function addDocument(filePaths: string[]) {
  let spinner;
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    spinner = ora({ ...defaultOraOptions, text: `Adding files to the Context Vector Store` }).start();
    const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
    // Load and split every file in parallel; each file yields an array of documents.
    const documents = await Promise.all(
      filePaths.map((filePath) => loadAndSplitFile(path.join(docsDirectory, filePath)))
    );
    // flat() replaces the reduce/concat idiom and matches addURL's flattening style.
    const flattenedDocuments = documents.flat();
    const vectorStore = await getContextVectorStore();
    await vectorStore.addDocuments(flattenedDocuments);
    await vectorStore.save(dbDirectory);
    spinner.succeed();
    return;
  } catch (error) {
    // The spinner only exists if ora started successfully; otherwise write directly.
    if (spinner) {
      spinner.fail(chalk.red(error));
    } else {
      output.write(chalk.red(error));
    }
  }
}
/**
 * Fetches the transcript of a YouTube video and adds it to the Context Vector Store.
 * @param {string} URLOrVideoID - Either the full URL or the bare video ID of a YouTube video.
 * @returns nothing (`undefined`); failures are reported via the spinner/output rather than thrown.
 */
async function addYouTube(URLOrVideoID: string) {
  let spinner;
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    spinner = ora({
      ...defaultOraOptions,
      text: `Adding Video transcript from ${URLOrVideoID} to the Context Vector Store`,
    }).start();
    const transcript = await YoutubeTranscript.fetchTranscript(URLOrVideoID);
    // Join the timestamped transcript fragments into a single text blob before splitting.
    const text = transcript.map((part) => part.text).join(' ');
    const splitter = new RecursiveCharacterTextSplitter();
    const videoDocs = await splitter.splitDocuments([
      new Document({
        pageContent: text,
      }),
    ]);
    const vectorStore = await getContextVectorStore();
    await vectorStore.addDocuments(videoDocs);
    await vectorStore.save(dbDirectory);
    spinner.succeed();
    return;
  } catch (error) {
    // The spinner only exists if ora started successfully; otherwise write directly.
    if (spinner) {
      spinner.fail(chalk.red(error));
    } else {
      output.write(chalk.red(error));
    }
  }
}
/**
* The function crawls a given URL, extracts text from the pages, splits the text into documents,
* generates embeddings for the documents, and saves them to a vector store.
* @param {string} URL - The URL of the website to crawl and extract text from.
* @param {string} selector - The selector parameter is a string that represents a CSS selector used to
* identify the HTML elements to be crawled on the web page. The WebCrawler will only crawl the
* elements that match the selector.
* @param {number} maxPages - The maximum number of pages to crawl for the given URL.
* @param {number} numberOfCharactersRequired - `numberOfCharactersRequired` is a number that specifies
* the minimum number of characters required for a document to be considered valid and used for
* generating embeddings. Any document with less than this number of characters will be discarded.
* @returns Nothing is being returned explicitly in the function, but it is implied that the function
* will return undefined if there are no errors.
*/
async function addURL(URL: string, selector: string, maxPages: number, numberOfCharactersRequired: number) {
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
const addUrlSpinner = ora({ ...defaultOraOptions, text: `Crawling ${URL}` });
let documents;
try {
addUrlSpinner.start();
const progressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => {
addUrlSpinner.text = `Links found: ${linksFound} - Links crawled: ${linksCrawled} - Crawling ${currentUrl}`;
};
const crawler = new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired);
const pages = (await crawler.start()) as Page[];
documents = await Promise.all(
pages.map((row) => {
const splitter = new RecursiveCharacterTextSplitter();
const webDocs = splitter.splitDocuments([
new Document({
pageContent: row.text,
}),
]);
return webDocs;
})
);
addUrlSpinner.succeed();
} catch (error) {
addUrlSpinner.fail(chalk.red(error));
}
if (documents) {
const generateEmbeddingsSpinner = ora({ ...defaultOraOptions, text: `Generating Embeddings` });
try {
const flattenedDocuments = documents.flat();
generateEmbeddingsSpinner.text = `Generating Embeddings for ${flattenedDocuments.length} documents`;
generateEmbeddingsSpinner.start();
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
generateEmbeddingsSpinner.succeed();
return;
} catch (error) {
generateEmbeddingsSpinner.fail(chalk.red(error));
}
}
}
async function listContextStores() {
const projectRoot = getProjectRoot(); // Please replace this with your actual function to get the project root
const vectorStoreDir = process.env.VECTOR_STORE_BASE_DIR || 'db';
const targetDir = path.join(projectRoot, vectorStoreDir);
const contextVectorStoresList = await getDirectoryListWithDetails(targetDir);
output.write(chalk.blue(`Context Vector Stores in ${targetDir}:\n\n`));
Object.entries(contextVectorStoresList).forEach(([dir, files]) => {
output.write(chalk.yellow(`Directory: ${dir}`));
if (dir === getConfig().currentVectorStoreDatabasePath) {
output.write(chalk.green(` (Currently selected)`));
}
output.write('\n');
files.forEach((file) => {
output.write(chalk.yellow(` File: ${file.name}, Size: ${file.size} KB\n`));
});
});
}
export { getContextVectorStore, addDocument, addURL, addYouTube, listContextStores, loadOrCreateEmptyVectorStore };
| src/lib/contextManager.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/utils/getDirectoryFiles.ts",
"retrieved_chunk": "import path from 'path';\nimport fs from 'node:fs/promises';\nexport default async function getDirectoryFiles(directoryPath: string): Promise<string[]> {\n const fileNames = await fs.readdir(directoryPath);\n const filePathsPromises = fileNames.map(async (fileName) => {\n const filePath = path.join(directoryPath, fileName);\n const stat = await fs.stat(filePath);\n if (stat.isDirectory()) {\n const subDirectoryFiles = await getDirectoryFiles(filePath);\n return subDirectoryFiles;",
"score": 0.8131814002990723
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "}\nasync function deleteMemoryDirectory() {\n try {\n const files = await fs.readdir(memoryDirectory);\n const deletePromises = files.map((file) => fs.unlink(path.join(memoryDirectory, file)));\n await Promise.all(deletePromises);\n return `All files in the memory directory have been deleted.`;\n } catch (error) {\n if (error instanceof Error) {\n return chalk.red(`All files in the memory directory have been deleted: ${error.message}`);",
"score": 0.8031811714172363
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": " documents: Array<{ content: string; metadataType: string }>\n): Promise<void> {\n const formattedDocuments = documents.map(\n (doc) => new Document({ pageContent: doc.content, metadata: { type: doc.metadataType } })\n );\n await memoryWrapper.vectorStoreInstance.addDocuments(formattedDocuments);\n await saveMemoryVectorStore();\n}\nfunction resetBufferWindowMemory() {\n bufferWindowMemory.clear();",
"score": 0.7983850240707397
},
{
"filename": "src/utils/getDirectoryListWithDetails.ts",
"retrieved_chunk": "import fs from 'node:fs/promises';\nimport path from 'path';\nexport default async function getDirectoryListWithDetails(\n directory: string,\n contents: DirectoryContent = {}\n): Promise<DirectoryContent> {\n const dirents = await fs.readdir(directory, { withFileTypes: true });\n const newContents: DirectoryContent = { ...contents };\n const files: FileInfo[] = [];\n const actions = dirents.map(async (dirent) => {",
"score": 0.7726858854293823
},
{
"filename": "src/updateReadme.ts",
"retrieved_chunk": "import fs from 'fs';\nimport path from 'path';\nimport { getProjectRoot } from './config/index.js';\nconst projectRootDir = getProjectRoot();\nconst commandsDir = path.join(projectRootDir, 'src', 'commands');\nconst readmePath = path.join(projectRootDir, 'README.md');\nconst commandFiles = fs.readdirSync(commandsDir).filter((file) => file !== 'command.ts');\nasync function getCommandsMarkdown() {\n const commandsPromises = commandFiles.map(async (file) => {\n const commandModule = await import(path.join(commandsDir, file));",
"score": 0.770926833152771
}
] | typescript | (acc, val) => acc.concat(val), []); |
/* eslint-disable no-await-in-loop */
import dotenv from 'dotenv';
import { OpenAIChat } from 'langchain/llms/openai';
// eslint-disable-next-line import/no-unresolved
import * as readline from 'node:readline/promises';
import path from 'path';
import fs from 'fs';
/* This line of code is importing the `stdin` and `stdout` streams from the `process` module in
Node.js. These streams are used for reading input from the user and writing output to the console,
respectively. */
import { stdin as input, stdout as output } from 'node:process';
import { CallbackManager } from 'langchain/callbacks';
import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts';
import { LLMChain } from 'langchain/chains';
import { oneLine } from 'common-tags';
import chalk from 'chalk';
import logChat from './chatLogger.js';
import createCommandHandler from './commands.js';
import { getMemoryVectorStore, addDocumentsToMemoryVectorStore, getBufferWindowMemory } from './lib/memoryManager.js';
import { getContextVectorStore } from './lib/contextManager.js';
import { getRelevantContext } from './lib/vectorStoreUtils.js';
import sanitizeInput from './utils/sanitizeInput.js';
import { getConfig, getProjectRoot } from './config/index.js';
const projectRootDir = getProjectRoot();
dotenv.config();
// Set up the chat log directory
const chatLogDirectory = path.join(projectRootDir, 'chat_logs');
// Get the prompt template
const systemPromptTemplate = fs.readFileSync(path.join(projectRootDir, 'src/prompt.txt'), 'utf8');
// Set up the readline interface to read input from the user and write output to the console
const rl = readline.createInterface({ input, output });
// Set up CLI commands
const commandHandler: CommandHandler = createCommandHandler();
const callbackManager = CallbackManager.fromHandlers({
// This function is called when the LLM generates a new token (i.e., a prediction for the next word)
async handleLLMNewToken(token: string) {
// Write the token to the output stream (i.e., the console)
output.write(token);
},
});
const llm = new OpenAIChat({
streaming: true,
callbackManager,
modelName: process.env.MODEL || 'gpt-3.5-turbo',
});
const systemPrompt = SystemMessagePromptTemplate.fromTemplate(oneLine`
${systemPromptTemplate}
`);
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
systemPrompt,
HumanMessagePromptTemplate.fromTemplate('QUESTION: """{input}"""'),
]);
const windowMemory = getBufferWindowMemory();
const chain = new LLMChain({
prompt: chatPrompt,
memory: windowMemory,
llm,
});
// eslint-disable-next-line no-constant-condition
while (true) {
output.write(chalk.green('\nStart chatting or type /help for a list of commands\n'));
const userInput = await rl.question('> ');
let response;
if (userInput.startsWith('/')) {
const [command, ...args] = userInput.slice(1).split(' ');
await commandHandler.execute(command, args, output);
} else {
const memoryVectorStore = await getMemoryVectorStore();
const contextVectorStore = await getContextVectorStore();
const question = sanitizeInput(userInput);
const config = getConfig();
const context = await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve);
const history = await getRelevantContext(memoryVectorStore, question, config.numMemoryDocumentsToRetrieve);
try {
response = await chain.call({
input: question,
context,
history,
immediate_history: config.useWindowMemory ? windowMemory : '',
});
if (response) {
await addDocumentsToMemoryVectorStore([
{ content: question, metadataType: 'question' },
{ content: response.text, metadataType: 'answer' },
]);
| await logChat(chatLogDirectory, question, response.response); |
}
} catch (error) {
if (error instanceof Error && error.message.includes('Cancel:')) {
// TODO: Handle cancel
} else if (error instanceof Error) {
output.write(chalk.red(error.message));
} else {
output.write(chalk.red(error));
}
}
}
output.write('\n');
}
| src/index.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": " documents: Array<{ content: string; metadataType: string }>\n): Promise<void> {\n const formattedDocuments = documents.map(\n (doc) => new Document({ pageContent: doc.content, metadata: { type: doc.metadataType } })\n );\n await memoryWrapper.vectorStoreInstance.addDocuments(formattedDocuments);\n await saveMemoryVectorStore();\n}\nfunction resetBufferWindowMemory() {\n bufferWindowMemory.clear();",
"score": 0.8079125881195068
},
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": " pageContent: text,\n }),\n ]);\n const vectorStore = await getContextVectorStore();\n await vectorStore.addDocuments(videoDocs);\n await vectorStore.save(dbDirectory);\n spinner.succeed();\n return;\n } catch (error) {\n if (spinner) {",
"score": 0.7737258672714233
},
{
"filename": "src/commands/resetChatCommand.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport createCommand from './command.js';\nimport { resetBufferWindowMemory, resetMemoryVectorStore, setMemoryVectorStore } from '../lib/memoryManager.js';\nconst resetChatCommand = createCommand(\n 'reset',\n [],\n 'Resets the chat and starts a new conversation - This clears the memory vector store and the buffer window memory.',\n async (_args, output) => {\n output.write(chalk.yellow('\\nResetting the chat!\\n'));\n await resetMemoryVectorStore((newMemoryVectorStore) => {",
"score": 0.7535869479179382
},
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": " const pages = (await crawler.start()) as Page[];\n documents = await Promise.all(\n pages.map((row) => {\n const splitter = new RecursiveCharacterTextSplitter();\n const webDocs = splitter.splitDocuments([\n new Document({\n pageContent: row.text,\n }),\n ]);\n return webDocs;",
"score": 0.7345484495162964
},
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": " const dbDirectory = getConfig().currentVectorStoreDatabasePath;\n try {\n vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));\n } catch {\n spinner = ora({\n ...defaultOraOptions,\n text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),\n }).start();\n const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');\n const filesToAdd = await getDirectoryFiles(docsDirectory);",
"score": 0.7287442088127136
}
] | typescript | await logChat(chatLogDirectory, question, response.response); |
import chalk from 'chalk';
import { stdout as output } from 'node:process';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { PDFLoader } from 'langchain/document_loaders/fs/pdf';
import { DocxLoader } from 'langchain/document_loaders/fs/docx';
import { EPubLoader } from 'langchain/document_loaders/fs/epub';
import { CSVLoader } from 'langchain/document_loaders/fs/csv';
import ora from 'ora';
import { MarkdownTextSplitter, RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { Document } from 'langchain/document';
import path from 'path';
import { YoutubeTranscript } from 'youtube-transcript';
import getDirectoryListWithDetails from '../utils/getDirectoryListWithDetails.js';
import createDirectory from '../utils/createDirectory.js';
import { getConfig, getDefaultOraOptions, getProjectRoot, setCurrentVectorStoreDatabasePath } from '../config/index.js';
import getDirectoryFiles from '../utils/getDirectoryFiles.js';
import WebCrawler from './crawler.js';
const projectRootDir = getProjectRoot();
const defaultOraOptions = getDefaultOraOptions(output);
/**
* This function loads and splits a file based on its extension using different loaders and text
* splitters.
* @param {string} filePath - A string representing the path to the file that needs to be loaded and
* split into documents.
* @returns The function `loadAndSplitFile` returns a Promise that resolves to an array of `Document`
* objects, where each `Document` represents a split portion of the input file. The type of the
* `Document` object is `Document<Record<string, unknown>>`, which means it has a generic type
* parameter that is an object with string keys and unknown values.
*/
async function loadAndSplitFile(filePath: string): Promise<Document<Record<string, unknown>>[]> {
const fileExtension = path.extname(filePath);
let loader;
let documents: Document<Record<string, unknown>>[];
switch (fileExtension) {
case '.json':
loader = new JSONLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.txt':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.md':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new MarkdownTextSplitter());
break;
case '.pdf':
loader = new PDFLoader(filePath, { splitPages: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.docx':
loader = new DocxLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.csv':
loader = new CSVLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.epub':
loader = new EPubLoader(filePath, { splitChapters: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
default:
throw new Error(`Unsupported file extension: ${fileExtension}`);
}
return documents;
}
/**
* This function loads or creates a vector store using HNSWLib and OpenAIEmbeddings.
* @returns The function `loadOrCreateVectorStore` returns a Promise that resolves to an instance of
* the `HNSWLib` class, which is a vector store used for storing and searching high-dimensional
* vectors.
*/
async function loadOrCreateVectorStore(): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
await createDirectory(getConfig().currentVectorStoreDatabasePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),
}).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const filesToAdd = await getDirectoryFiles(docsDirectory);
const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath)));
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
vectorStore = await HNSWLib.fromDocuments(flattenedDocuments, new OpenAIEmbeddings({ maxConcurrency: 5 }));
await vectorStore.save(dbDirectory);
spinner.succeed();
}
return vectorStore;
}
const contextVectorStore = await loadOrCreateVectorStore();
const contextWrapper = {
contextInstance: contextVectorStore,
};
/**
* This function loads or creates a new empty Context Vector Store using HNSWLib and OpenAIEmbeddings.
* @returns a Promise that resolves to an instance of the HNSWLib class, which represents a
* hierarchical navigable small world graph used for nearest neighbor search. The instance is either
* loaded from an existing directory or created as a new empty Context Vector Store with specified
* parameters.
*/
async function loadOrCreateEmptyVectorStore(subDirectory: string): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
const newContextVectorStorePath = path.join(projectRootDir, process.env.VECTOR_STORE_BASE_DIR || 'db', subDirectory);
await createDirectory(newContextVectorStorePath);
setCurrentVectorStoreDatabasePath(newContextVectorStorePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
output.write(chalk.blue(`Using Context Vector Store in the ${dbDirectory} directory\n`));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new empty Context Vector Store in the ${dbDirectory} directory`),
}).start();
vectorStore = new HNSWLib(new OpenAIEmbeddings({ maxConcurrency: 5 }), {
space: 'cosine',
numDimensions: 1536,
});
spinner.succeed();
output.write(
chalk.red.bold(
`\nThe Context Vector Store is currently empty and unsaved, add context to is using \`/add-docs\`, \`/add-url\` or \`/add-youtube\``
)
);
}
contextWrapper.contextInstance = vectorStore;
return vectorStore;
}
async function getContextVectorStore() {
return contextWrapper.contextInstance;
}
/**
* This function adds documents to a context vector store and saves them.
* @param {string[]} filePaths - The `filePaths` parameter is an array of strings representing the file
* paths of the documents that need to be added to the Context Vector Store.
* @returns nothing (`undefined`).
*/
async function addDocument(filePaths: string[]) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({ ...defaultOraOptions, text: `Adding files to the Context Vector Store` }).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const documents = await Promise.all(
filePaths.map((filePath) => loadAndSplitFile(path.join(docsDirectory, filePath)))
);
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
/**
* The function adds a YouTube video transcript to a Context Vector Store.
* @param {string} URLOrVideoID - The URLOrVideoID parameter is a string that represents either the URL
* or the video ID of a YouTube video.
* @returns Nothing is being returned explicitly in the code, but the function is expected to return
* undefined after completing its execution.
*/
async function addYouTube(URLOrVideoID: string) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({
...defaultOraOptions,
text: `Adding Video transcript from ${URLOrVideoID} to the Context Vector Store`,
}).start();
const transcript = await YoutubeTranscript.fetchTranscript(URLOrVideoID);
const text = transcript.map((part) => part.text).join(' ');
const splitter = new RecursiveCharacterTextSplitter();
const videoDocs = await splitter.splitDocuments([
new Document({
pageContent: text,
}),
]);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(videoDocs);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
/**
* The function crawls a given URL, extracts text from the pages, splits the text into documents,
* generates embeddings for the documents, and saves them to a vector store.
* @param {string} URL - The URL of the website to crawl and extract text from.
* @param {string} selector - The selector parameter is a string that represents a CSS selector used to
* identify the HTML elements to be crawled on the web page. The WebCrawler will only crawl the
* elements that match the selector.
* @param {number} maxPages - The maximum number of pages to crawl for the given URL.
* @param {number} numberOfCharactersRequired - `numberOfCharactersRequired` is a number that specifies
* the minimum number of characters required for a document to be considered valid and used for
* generating embeddings. Any document with less than this number of characters will be discarded.
* @returns Nothing is being returned explicitly in the function, but it is implied that the function
* will return undefined if there are no errors.
*/
async function addURL(URL: string, selector: string, maxPages: number, numberOfCharactersRequired: number) {
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
const addUrlSpinner = ora({ ...defaultOraOptions, text: `Crawling ${URL}` });
let documents;
try {
addUrlSpinner.start();
const progressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => {
addUrlSpinner.text = `Links found: ${linksFound} - Links crawled: ${linksCrawled} - Crawling ${currentUrl}`;
};
| const crawler = new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired); |
const pages = (await crawler.start()) as Page[];
documents = await Promise.all(
pages.map((row) => {
const splitter = new RecursiveCharacterTextSplitter();
const webDocs = splitter.splitDocuments([
new Document({
pageContent: row.text,
}),
]);
return webDocs;
})
);
addUrlSpinner.succeed();
} catch (error) {
addUrlSpinner.fail(chalk.red(error));
}
if (documents) {
const generateEmbeddingsSpinner = ora({ ...defaultOraOptions, text: `Generating Embeddings` });
try {
const flattenedDocuments = documents.flat();
generateEmbeddingsSpinner.text = `Generating Embeddings for ${flattenedDocuments.length} documents`;
generateEmbeddingsSpinner.start();
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
generateEmbeddingsSpinner.succeed();
return;
} catch (error) {
generateEmbeddingsSpinner.fail(chalk.red(error));
}
}
}
async function listContextStores() {
const projectRoot = getProjectRoot(); // Please replace this with your actual function to get the project root
const vectorStoreDir = process.env.VECTOR_STORE_BASE_DIR || 'db';
const targetDir = path.join(projectRoot, vectorStoreDir);
const contextVectorStoresList = await getDirectoryListWithDetails(targetDir);
output.write(chalk.blue(`Context Vector Stores in ${targetDir}:\n\n`));
Object.entries(contextVectorStoresList).forEach(([dir, files]) => {
output.write(chalk.yellow(`Directory: ${dir}`));
if (dir === getConfig().currentVectorStoreDatabasePath) {
output.write(chalk.green(` (Currently selected)`));
}
output.write('\n');
files.forEach((file) => {
output.write(chalk.yellow(` File: ${file.name}, Size: ${file.size} KB\n`));
});
});
}
export { getContextVectorStore, addDocument, addURL, addYouTube, listContextStores, loadOrCreateEmptyVectorStore };
| src/lib/contextManager.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": " documents: Array<{ content: string; metadataType: string }>\n): Promise<void> {\n const formattedDocuments = documents.map(\n (doc) => new Document({ pageContent: doc.content, metadata: { type: doc.metadataType } })\n );\n await memoryWrapper.vectorStoreInstance.addDocuments(formattedDocuments);\n await saveMemoryVectorStore();\n}\nfunction resetBufferWindowMemory() {\n bufferWindowMemory.clear();",
"score": 0.7707154154777527
},
{
"filename": "src/commands/addURLCommand.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport createCommand from './command.js';\nimport { addURL } from '../lib/contextManager.js';\nconst addURLCommand = createCommand(\n 'add-url',\n ['url'],\n `Scrapes the content from a url and adds it to the context vector store.\\n\n Arguments: \\`url\\`, \\`selector to extract\\` (Default: body), \\`Maximum number of links to follow\\` (Default: 20), \\`Ignore pages with less than n characters\\` (Default: 200)\\n\n Example: /add-url https://dociq.io main 10 500\\n\n This operation may try to generate a large number of embeddings depending on the structure of the web pages and may lead to rate-limiting.\\n",
"score": 0.7667068243026733
},
{
"filename": "src/lib/crawler.ts",
"retrieved_chunk": " this.urls = urls;\n this.selector = selector;\n this.limit = limit;\n this.textLengthMinimum = textLengthMinimum;\n this.progressCallback = progressCallback;\n this.count = 0;\n this.pages = [];\n this.crawler = new Crawler({\n maxConnections: 10,\n callback: this.handleRequest,",
"score": 0.7659075260162354
},
{
"filename": "src/lib/crawler.ts",
"retrieved_chunk": " title: string;\n}\n/* The WebCrawler class is a TypeScript implementation of a web crawler that can extract text from web\npages and follow links to crawl more pages. */\nclass WebCrawler {\n pages: Page[];\n limit: number;\n urls: string[];\n count: number;\n textLengthMinimum: number;",
"score": 0.7621438503265381
},
{
"filename": "src/lib/crawler.ts",
"retrieved_chunk": " selector: string;\n progressCallback: ProgressCallback;\n crawler: Crawler;\n constructor(\n urls: string[],\n progressCallback: ProgressCallback,\n selector = 'body',\n limit = 20,\n textLengthMinimum = 200\n ) {",
"score": 0.7559791803359985
}
] | typescript | const crawler = new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired); |
import {isNoScenamatica} from "../utils.js"
import {deployPlugin} from "./deployer.js"
import {kill, onDataReceived} from "./client";
import type {ChildProcess} from "node:child_process";
import {spawn} from "node:child_process";
import type {Writable} from "node:stream";
import * as fs from "node:fs";
import path from "node:path";
import {info, setFailed, warning} from "@actions/core";
import {printFooter} from "../outputs/summary";
let serverProcess: ChildProcess | undefined
let serverStdin: Writable | undefined
const genArgs = (executable: string, args: string[]) => {
return [
...args,
"-jar",
executable,
"nogui"
]
}
const createServerProcess = (workDir: string, executable: string, args: string[] = []) => {
const cp = spawn(
"java",
genArgs(executable, args),
{
cwd: workDir
}
)
serverStdin = cp.stdin
serverProcess = cp
return cp
}
export const startServerOnly = async (workDir: string, executable: string, args: string[] = []) => {
info(`Starting server with executable ${executable} and args ${args.join(" ")}`)
const cp = createServerProcess(workDir, executable, args)
cp.stdout.on("data", (data: Buffer) => {
const line = data.toString("utf8")
if (line.includes("Done") && line.includes("For help, type \"help\""))
serverStdin?.write("stop\n")
if (line.endsWith("\n"))
info(line.slice(0, - 1))
else
info(line)
})
return new Promise<number>((resolve, reject) => {
cp.on("exit", (code) => {
if (code === 0)
resolve(code)
else
reject(code)
})
})
}
export const stopServer = () => {
if (!serverStdin || !serverProcess)
return
info("Stopping server...")
serverStdin.write("stop\n")
setTimeout(() => {
if (serverProcess!.killed)
return
warning("Server didn't stop in time, killing it...")
serverProcess?.kill("SIGKILL")
}, 1000 * 20)
}
export const startTests = async (serverDir: string, executable: string, pluginFile: string) => {
info(`Starting tests of plugin ${pluginFile}.`)
if (isNoScenamatica())
await removeScenamatica(serverDir)
await deployPlugin(serverDir, pluginFile)
const cp = createServerProcess(serverDir, executable)
cp.stdout.on("data", async (data: Buffer) => {
await | onDataReceived(data.toString("utf8"))
})
} |
const removeScenamatica = async (serverDir: string) => {
info("Removing Scenamatica from server...")
const pluginDir = path.join(serverDir, "plugins")
const files = await fs.promises.readdir(pluginDir)
for (const file of files) {
if (file.includes("Scenamatica") && file.endsWith(".jar")) {
info(`Removing ${file}...`)
await fs.promises.rm(path.join(pluginDir, file))
}
}
}
export const endTests = async (succeed: boolean) => {
info("Ending tests, shutting down server...")
kill()
stopServer()
await printFooter()
let code: number
if (succeed) {
info("Tests succeeded")
code = 0
} else {
setFailed("Tests failed")
code = 1
}
process.exit(code)
}
| src/server/controller.ts | TeamKun-scenamatica-action-6f66283 | [
{
"filename": "src/main.ts",
"retrieved_chunk": " initPRMode(pullRequest, githubToken)\n }\n if (!fs.existsSync(pluginFile)) {\n setFailed(`Plugin file ${pluginFile} does not exist`)\n return\n }\n const paper = await deployServer(serverDir, javaVersion, mcVersion, scenamaticaVersion)\n info(\"Starting tests...\")\n await startTests(serverDir, paper, pluginFile)\n}",
"score": 0.8262317180633545
},
{
"filename": "src/server/deployer.ts",
"retrieved_chunk": "export const deployPlugin = async (serverDir: string, pluginFile: string) => {\n const pluginDir = path.join(serverDir, \"plugins\")\n await io.mkdirP(pluginDir)\n await io.cp(pluginFile, pluginDir)\n}\nconst initScenamaticaConfig = async (configDir: string, scenamaticaVersion: string) => {\n const configPath = path.join(configDir, \"config.yml\")\n const configData = yaml.load(await fs.promises.readFile(configPath, \"utf8\")) as {\n interfaces?: {\n raw: boolean",
"score": 0.8046951293945312
},
{
"filename": "src/server/deployer.ts",
"retrieved_chunk": " info(\"Extracting...\")\n await (isTar ? tc.extractTar(dest, destDir) : tc.extractZip(dest, destDir))\n core.addPath(path.join(destDir, \"bin\"))\n info(`Installed Java ${version}`)\n}\nconst isJavaInstalled = async () => {\n try {\n await exec(\"java\", [\"-version\"])\n return true\n } catch {",
"score": 0.7983375191688538
},
{
"filename": "src/server/deployer.ts",
"retrieved_chunk": "const downloadScenamatica = async (destDir: string, version: string) => {\n const url = SCENAMATICA_URL.replace(/\\{version}/g, version)\n info(`Downloading Scenamatica ${version} from ${url}`)\n const destPath = await tc.downloadTool(url, path.join(destDir, `Scenamatica-${version}.jar`))\n info(`Downloaded Scenamatica ${version} to ${destPath}`)\n return destPath\n}\nconst fetchLatestJavaLinkFor = async (version: string) => {\n const processPlatform = process.platform\n const platform = processPlatform === \"win32\" ? \"windows\" : processPlatform === \"darwin\" ? \"macos\" : \"linux\"",
"score": 0.7605834603309631
},
{
"filename": "src/server/deployer.ts",
"retrieved_chunk": " await (os === \"unix\" ? exec(\"chmod\", [\"+x\", dest]) : exec(\"icacls\", [dest, \"/grant\", \"Everyone:(F)\"]));\n info(`Downloaded Paper ${mcVersion} build ${build} to ${dest}`)\n return build\n}\nconst writeEula = async (dir: string) => {\n const eulaPath = path.join(dir, \"eula.txt\")\n const eulaContent = \"eula=true\\n\"\n await fs.promises.writeFile(eulaPath, eulaContent)\n info(`Wrote eula.txt to ${eulaPath}`)\n}",
"score": 0.753364086151123
}
] | typescript | onDataReceived(data.toString("utf8"))
})
} |
import {isNoScenamatica} from "../utils.js"
import {deployPlugin} from "./deployer.js"
import {kill, onDataReceived} from "./client";
import type {ChildProcess} from "node:child_process";
import {spawn} from "node:child_process";
import type {Writable} from "node:stream";
import * as fs from "node:fs";
import path from "node:path";
import {info, setFailed, warning} from "@actions/core";
import {printFooter} from "../outputs/summary";
// Handles to the most recently spawned server process; set by
// createServerProcess and consumed by stopServer()/startServerOnly().
let serverProcess: ChildProcess | undefined
let serverStdin: Writable | undefined
// Builds the java command line: JVM flags first, then the jar to launch,
// then Paper's "nogui" switch to suppress the GUI console.
const genArgs = (executable: string, args: string[]) => {
    const jvmFlags = [...args]
    const launchTarget = ["-jar", executable, "nogui"]
    return jvmFlags.concat(launchTarget)
}
// Spawns the Minecraft server as a child `java` process in `workDir` and
// records its handle and stdin stream in the module-level state so
// stopServer() can reach it later.
const createServerProcess = (workDir: string, executable: string, args: string[] = []) => {
    const cp = spawn(
        "java",
        genArgs(executable, args),
        {
            cwd: workDir
        }
    )
    // Keep handles for the later shutdown sequence (see stopServer).
    serverStdin = cp.stdin
    serverProcess = cp
    return cp
}
// Boots the server without running tests, waits for Paper's "Done" startup
// line, then immediately asks it to stop. Resolves/rejects with the exit code.
export const startServerOnly = async (workDir: string, executable: string, args: string[] = []) => {
    info(`Starting server with executable ${executable} and args ${args.join(" ")}`)
    const cp = createServerProcess(workDir, executable, args)
    cp.stdout.on("data", (data: Buffer) => {
        const line = data.toString("utf8")
        // Paper prints this exact phrase once startup finished; request shutdown.
        if (line.includes("Done") && line.includes("For help, type \"help\""))
            serverStdin?.write("stop\n")
        // Strip a single trailing newline so info() doesn't double-space the log.
        if (line.endsWith("\n"))
            info(line.slice(0, - 1))
        else
            info(line)
    })
    return new Promise<number>((resolve, reject) => {
        // NOTE(review): `code` is null when the process dies from a signal, which
        // currently rejects with null — confirm callers tolerate a null reason.
        cp.on("exit", (code) => {
            if (code === 0)
                resolve(code)
            else
                reject(code)
        })
    })
}
/**
 * Asks the running server to shut down gracefully via its stdin, and
 * force-kills it with SIGKILL if it has not exited within 20 seconds.
 * No-op when no server has been started.
 */
export const stopServer = () => {
    if (!serverStdin || !serverProcess)
        return
    info("Stopping server...")
    serverStdin.write("stop\n")
    setTimeout(() => {
        // `killed` only records that kill() was called; a server that exited
        // gracefully from the "stop" command leaves it false. Also check
        // exitCode so we don't warn and SIGKILL an already-dead process.
        if (serverProcess!.killed || serverProcess!.exitCode !== null)
            return
        warning("Server didn't stop in time, killing it...")
        serverProcess?.kill("SIGKILL")
    }, 1000 * 20)
}
// Deploys the plugin under test (optionally stripping Scenamatica itself when
// the no-scenamatica flag is set), boots the server, and pipes its stdout
// into the Scenamatica packet client.
export const startTests = async (serverDir: string, executable: string, pluginFile: string) => {
    info(`Starting tests of plugin ${pluginFile}.`)
    if (isNoScenamatica())
        await removeScenamatica(serverDir)
    await deployPlugin(serverDir, pluginFile)
    const cp = createServerProcess(serverDir, executable)
    cp.stdout.on("data", async (data: Buffer) => {
        await onDataReceived(data.toString("utf8"))
    })
}
// Deletes every Scenamatica jar found in the server's plugins directory.
const removeScenamatica = async (serverDir: string) => {
    info("Removing Scenamatica from server...")
    const pluginDir = path.join(serverDir, "plugins")
    const entries = await fs.promises.readdir(pluginDir)
    const targets = entries.filter((name) => name.includes("Scenamatica") && name.endsWith(".jar"))
    for (const name of targets) {
        info(`Removing ${name}...`)
        await fs.promises.rm(path.join(pluginDir, name))
    }
}
export const endTests = async (succeed: boolean) => {
info("Ending tests, shutting down server...")
kill()
stopServer()
await | printFooter()
let code: number
if (succeed) { |
info("Tests succeeded")
code = 0
} else {
setFailed("Tests failed")
code = 1
}
process.exit(code)
}
| src/server/controller.ts | TeamKun-scenamatica-action-6f66283 | [
{
"filename": "src/server/client.ts",
"retrieved_chunk": " incomingBuffer = messages.slice(1).join(\"\\n\") || undefined\n if (!await processPacket(messages[0]))\n info(messages[0])\n }\n}\nexport const kill = () => {\n alive = false\n}\nconst processPacket = async (msg: string) => {\n if (!alive) {",
"score": 0.8062002658843994
},
{
"filename": "src/outputs/messages.ts",
"retrieved_chunk": " )\n}\nexport const getFooter = () => {\n return joinLine(\n \"<hr />\",\n getLicenseMessage()\n )\n}\nconst getEnvInfoMessage = () => {\n const runArgs = getArguments()",
"score": 0.7975443601608276
},
{
"filename": "src/outputs/pull-request/appender.ts",
"retrieved_chunk": "}\nexport const reportSessionEnd = (packet: PacketSessionEnd) => {\n const {results, finishedAt, startedAt} = packet\n appendHeaderIfNotPrinted()\n outMessage += `${getTestSummary(results, startedAt, finishedAt)}\n ${getTestResultTable(results, true)}\n `\n}\nconst appendHeaderIfNotPrinted = () => {\n if (!headerPrinted) {",
"score": 0.7891334295272827
},
{
"filename": "src/utils.ts",
"retrieved_chunk": " passed,\n failures,\n skipped,\n cancelled,\n }\n}\nexport const isTestSucceed = (results: PacketTestEnd[]) => {\n const {failures} = extractTestResults(results)\n const threshold = getArguments().failThreshold\n return failures <= threshold",
"score": 0.7866538166999817
},
{
"filename": "src/logging.ts",
"retrieved_chunk": " }\n}\nexport const logSessionStart = (startedAt: number, tests: number): void => {\n info(\"--------------------------------------\")\n info(\" T E S T S\")\n info(\"--------------------------------------\")\n info(`The session is started at ${startedAt}, ${tests} tests are marked to be run.`)\n}\nexport const logSessionEnd = (sessionEnd: PacketSessionEnd): void => {\n const elapsed = `${Math.ceil((sessionEnd.finishedAt - sessionEnd.startedAt) / 1000)} sec`",
"score": 0.7834547758102417
}
] | typescript | printFooter()
let code: number
if (succeed) { |
import {isNoScenamatica} from "../utils.js"
import {deployPlugin} from "./deployer.js"
import {kill, onDataReceived} from "./client";
import type {ChildProcess} from "node:child_process";
import {spawn} from "node:child_process";
import type {Writable} from "node:stream";
import * as fs from "node:fs";
import path from "node:path";
import {info, setFailed, warning} from "@actions/core";
import {printFooter} from "../outputs/summary";
// Handles to the most recently spawned server process; set by
// createServerProcess and consumed by stopServer()/startServerOnly().
let serverProcess: ChildProcess | undefined
let serverStdin: Writable | undefined
// Builds the java command line: JVM flags first, then the jar to launch,
// then Paper's "nogui" switch to suppress the GUI console.
const genArgs = (executable: string, args: string[]) => {
    const jvmFlags = [...args]
    const launchTarget = ["-jar", executable, "nogui"]
    return jvmFlags.concat(launchTarget)
}
// Spawns the Minecraft server as a child `java` process in `workDir` and
// records its handle and stdin stream in the module-level state so
// stopServer() can reach it later.
const createServerProcess = (workDir: string, executable: string, args: string[] = []) => {
    const cp = spawn(
        "java",
        genArgs(executable, args),
        {
            cwd: workDir
        }
    )
    // Keep handles for the later shutdown sequence (see stopServer).
    serverStdin = cp.stdin
    serverProcess = cp
    return cp
}
// Boots the server without running tests, waits for Paper's "Done" startup
// line, then immediately asks it to stop. Resolves/rejects with the exit code.
export const startServerOnly = async (workDir: string, executable: string, args: string[] = []) => {
    info(`Starting server with executable ${executable} and args ${args.join(" ")}`)
    const cp = createServerProcess(workDir, executable, args)
    cp.stdout.on("data", (data: Buffer) => {
        const line = data.toString("utf8")
        // Paper prints this exact phrase once startup finished; request shutdown.
        if (line.includes("Done") && line.includes("For help, type \"help\""))
            serverStdin?.write("stop\n")
        // Strip a single trailing newline so info() doesn't double-space the log.
        if (line.endsWith("\n"))
            info(line.slice(0, - 1))
        else
            info(line)
    })
    return new Promise<number>((resolve, reject) => {
        // NOTE(review): `code` is null when the process dies from a signal, which
        // currently rejects with null — confirm callers tolerate a null reason.
        cp.on("exit", (code) => {
            if (code === 0)
                resolve(code)
            else
                reject(code)
        })
    })
}
/**
 * Asks the running server to shut down gracefully via its stdin, and
 * force-kills it with SIGKILL if it has not exited within 20 seconds.
 * No-op when no server has been started.
 */
export const stopServer = () => {
    if (!serverStdin || !serverProcess)
        return
    info("Stopping server...")
    serverStdin.write("stop\n")
    setTimeout(() => {
        // `killed` only records that kill() was called; a server that exited
        // gracefully from the "stop" command leaves it false. Also check
        // exitCode so we don't warn and SIGKILL an already-dead process.
        if (serverProcess!.killed || serverProcess!.exitCode !== null)
            return
        warning("Server didn't stop in time, killing it...")
        serverProcess?.kill("SIGKILL")
    }, 1000 * 20)
}
// Deploys the plugin under test (optionally stripping Scenamatica itself when
// the no-scenamatica flag is set), boots the server, and pipes its stdout
// into the Scenamatica packet client.
export const startTests = async (serverDir: string, executable: string, pluginFile: string) => {
    info(`Starting tests of plugin ${pluginFile}.`)
    if (isNoScenamatica())
        await removeScenamatica(serverDir)
    await deployPlugin(serverDir, pluginFile)
    const cp = createServerProcess(serverDir, executable)
    cp.stdout.on("data", async (data: Buffer) => {
        await onDataReceived(data.toString("utf8"))
    })
}
// Deletes every Scenamatica jar found in the server's plugins directory.
const removeScenamatica = async (serverDir: string) => {
    info("Removing Scenamatica from server...")
    const pluginDir = path.join(serverDir, "plugins")
    const entries = await fs.promises.readdir(pluginDir)
    const targets = entries.filter((name) => name.includes("Scenamatica") && name.endsWith(".jar"))
    for (const name of targets) {
        info(`Removing ${name}...`)
        await fs.promises.rm(path.join(pluginDir, name))
    }
}
export const endTests = async (succeed: boolean) => {
info("Ending tests, shutting down server...")
kill()
stopServer()
| await printFooter()
let code: number
if (succeed) { |
info("Tests succeeded")
code = 0
} else {
setFailed("Tests failed")
code = 1
}
process.exit(code)
}
| src/server/controller.ts | TeamKun-scenamatica-action-6f66283 | [
{
"filename": "src/server/client.ts",
"retrieved_chunk": " incomingBuffer = messages.slice(1).join(\"\\n\") || undefined\n if (!await processPacket(messages[0]))\n info(messages[0])\n }\n}\nexport const kill = () => {\n alive = false\n}\nconst processPacket = async (msg: string) => {\n if (!alive) {",
"score": 0.8051278591156006
},
{
"filename": "src/outputs/messages.ts",
"retrieved_chunk": " )\n}\nexport const getFooter = () => {\n return joinLine(\n \"<hr />\",\n getLicenseMessage()\n )\n}\nconst getEnvInfoMessage = () => {\n const runArgs = getArguments()",
"score": 0.7962254881858826
},
{
"filename": "src/outputs/pull-request/appender.ts",
"retrieved_chunk": "}\nexport const reportSessionEnd = (packet: PacketSessionEnd) => {\n const {results, finishedAt, startedAt} = packet\n appendHeaderIfNotPrinted()\n outMessage += `${getTestSummary(results, startedAt, finishedAt)}\n ${getTestResultTable(results, true)}\n `\n}\nconst appendHeaderIfNotPrinted = () => {\n if (!headerPrinted) {",
"score": 0.7927734851837158
},
{
"filename": "src/utils.ts",
"retrieved_chunk": " passed,\n failures,\n skipped,\n cancelled,\n }\n}\nexport const isTestSucceed = (results: PacketTestEnd[]) => {\n const {failures} = extractTestResults(results)\n const threshold = getArguments().failThreshold\n return failures <= threshold",
"score": 0.7924844026565552
},
{
"filename": "src/main.ts",
"retrieved_chunk": " initPRMode(pullRequest, githubToken)\n }\n if (!fs.existsSync(pluginFile)) {\n setFailed(`Plugin file ${pluginFile} does not exist`)\n return\n }\n const paper = await deployServer(serverDir, javaVersion, mcVersion, scenamaticaVersion)\n info(\"Starting tests...\")\n await startTests(serverDir, paper, pluginFile)\n}",
"score": 0.786257266998291
}
] | typescript | await printFooter()
let code: number
if (succeed) { |
import {extractTestResults, getArguments} from "../utils";
import type {PacketTestEnd} from "../packets";
import {getEmojiForCause} from "../logging";
const MESSAGES_PASSED = [
":tada: Congrats! All tests passed! :star2:",
":raised_hands: High-five! You nailed all the tests! :tada::tada:",
":confetti_ball: Hooray! Everything's working perfectly! :tada::confetti_ball:",
":100: Perfect score! All tests passed with flying colors! :rainbow::clap:",
":thumbsup: Great job! All tests passed without a hitch! :rocket::star2:",
":metal: Rock on! All tests passed flawlessly! :guitar::metal:",
":partying_face: Celebrate good times! All tests passed with flying colors! :tada::confetti_ball::balloon:",
":muscle: You crushed it! All tests passed with ease! :fire::muscle:",
":1st_place_medal: Gold medal performance! All tests passed with flying colors! :1st_place_medal::star2:",
":champagne: Pop the champagne! All tests passed, time to celebrate! :champagne::tada:"
];
const MESSAGES_NO_TESTS = [
"Alright, who forgot to write tests? :face_with_raised_eyebrow:",
"No tests? Time to break out the crystal ball. :crystal_ball:",
"Tests? Who writes tests? :person_shrugging:",
"No tests found. Did they run away? :man_running: :woman_running:",
"No tests, no glory. :trophy:",
"Tests? We don't need no stinkin' tests! :shushing_face:",
"No tests? I guess we'll just have to wing it. :eagle:",
"You get a test, and you get a test! Everybody gets a test! :gift: :tada:",
"No tests? That's impossible! :dizzy_face:",
"Tests make the code go round. :carousel_horse:"
];
const MESSAGES_FAILED = [
"Oops! Something went wrong! :scream_cat:",
"Oh no! The tests have betrayed us! :scream:",
"Houston, we have a problem. :rocket:",
"Looks like we have some debugging to do. :beetle:",
"Failures? More like opportunities to improve! :muscle:",
"This is not the result we were looking for. :confused:",
"Looks like we need to rethink our strategy. :thinking:",
"Don't worry, we'll get 'em next time! :sunglasses:",
"Keep calm and debug on. :female_detective:",
"The only way is up from here! :rocket:"
];
const MESSAGES_PASSED_WITH_THRESHOLD = [
"Tests passed, but some are being rebellious. Debug mode: ON! :microscope:",
"Almost there! Some tests failed, but hey, progress is progress! :turtle:",
"Good news: most tests passed. Bad news: a few had different plans. Let's fix 'em! :hammer:",
"We're on the right track, but some tests are playing hard to get. Challenge accepted! :muscle:",
"Tests went well overall, but we have a few stubborn failures. Time for some gentle persuasion! :wrench:",
"Success with a side of failures. It's like a bittersweet symphony. Let's sweeten it up! :musical_note:",
"We're soaring high, but some tests got left behind. Time to reel them back in! :fishing_pole_and_fish:",
"Great progress, but we've got some test gremlins causing trouble. Let's send them packing! :imp:",
"Victory is ours, with a sprinkle of defeat. Let's conquer those pesky failures! :crossed_swords:",
"We're almost there, but a few tests are being rebellious. Let's bring them back to the flock! :sheep:"
];
const REPORT_URL = "https://github.com/TeamKun/Scenamatica/issues/new?assignees=PeyaPeyaPeyang&labels=Type%3A+Bug&projects=&template=bug_report.yml&title=%E3%80%90%E3%83%90%E3%82%B0%E3%80%91"
// Builds the report header. When `isError` is set, appends an error banner
// and an "Error details" section heading after the summary block.
export const getHeader = (isError: boolean) => {
    const errorLines = isError
        ? [
            wrap("h4", ":no_entry: ERROR!!"),
            wrap("p", "An unexpected error occurred while running the server and Scenamatica daemon."),
            wrap("h2", "Error details")
        ]
        : []
    return joinLine(wrap("h1", "Scenamatica"), wrap("h2", "Summary"), "<hr />", ...errorLines)
}
// Placeholder message shown while the tests are still running; replaced by
// the final summary once the session ends.
export const getRunningMessage = () => {
    const messages = [
        wrap("h4", ":hourglass_flowing_sand: Hey there! :wave: We're currently testing your plugin."),
        wrap("p", "The testing process may take some time, but we'll update this message once it's complete.")
    ]
    return joinLine(...messages)
}
| export const getTestSummary = (results: PacketTestEnd[], startedAt: number, finishedAt: number) => { |
const elapsed = (finishedAt - startedAt) / 1000
const {
total,
passed,
failures,
skipped,
cancelled
} = extractTestResults(results)
return joinLine(
getSummaryHeader(total, elapsed, passed, failures, skipped, cancelled),
"<hr />",
wrap("h2", "Details")
)
}
// Renders the per-test HTML result table. When `minimize` is true, the table
// is tucked inside a collapsible <details> element.
export const getTestResultTable = (results: PacketTestEnd[], minimize = false) => {
    const header = wrap("thead", joinLine(
        wrap("tr", joinLine(
            wrap("th", " "),
            wrap("th", "Test"),
            wrap("th", "Cause"),
            wrap("th", "State"),
            wrap("th", "Started at"),
            wrap("th", "Finished at"),
            wrap("th", "Elapsed"),
            wrap("th", "Test description")
        ))
    )
    )
    // One <tr> per test result, with an emoji reflecting the result cause.
    const body = wrap("tbody", joinLine(...results.map((result) => {
        const {
            cause,
            state,
            scenario,
            startedAt,
            finishedAt
        } = result
        const emoji = getEmojiForCause(cause)
        const { name } = scenario
        const startedAtStr = new Date(startedAt).toLocaleString()
        const finishedAtStr = new Date(finishedAt).toLocaleString()
        const testElapsed = `${Math.ceil((finishedAt - startedAt) / 1000)} sec`
        const description = scenario.description || "No description"
        return wrap("tr", joinLine(
            wrap("td", emoji),
            wrap("td", name),
            wrap("td", cause),
            wrap("td", state),
            wrap("td", startedAtStr),
            wrap("td", finishedAtStr),
            wrap("td", testElapsed),
            wrap("td", description)
        ))
    }))
    )
    const table = wrap("table", joinLine(header, body))
    if (minimize)
        return wrap("details", joinLine(
            wrap("summary", "Full test results"),
            table
        ))
    return table
}
/**
 * Renders the one-line mood message plus the run-statistics paragraph.
 *
 * Message pool selection:
 *  - no tests at all                 -> MESSAGES_NO_TESTS
 *  - everything passed or skipped    -> MESSAGES_PASSED
 *  - failures within the threshold   -> MESSAGES_PASSED_WITH_THRESHOLD
 *  - otherwise                       -> MESSAGES_FAILED
 */
const getSummaryHeader = (total: number, elapsed: number, passed: number, failures: number, skipped: number, cancelled: number) => {
    const threshold = getArguments().failThreshold
    let messageSource: string[]
    // Check the empty run first: previously `total === passed + skipped`
    // matched 0 === 0 and showed an "all tests passed" message for a run with
    // no tests at all, while `failures === 0` mislabeled runs containing only
    // cancelled tests as "no tests".
    if (total === 0) messageSource = MESSAGES_NO_TESTS
    else if (total === passed + skipped) messageSource = MESSAGES_PASSED
    else if (failures <= threshold) messageSource = MESSAGES_PASSED_WITH_THRESHOLD
    else messageSource = MESSAGES_FAILED
    // Pick a random flavour line from the chosen pool.
    const summaryText = messageSource[Math.floor(Math.random() * messageSource.length)]
    return joinLine(
        wrap("h4", summaryText),
        "<br />",
        wrap("p", join(", ",
            `Tests run: ${total}`,
            `Failures: ${failures}`,
            `Skipped: ${skipped}`,
            `Cancelled: ${cancelled}`,
            `Time elapsed: ${elapsed} sec`
        ))
    )
}
// Formats a daemon-side exception (type, message, stack frames) as a
// preformatted <pre><code> block for the report.
export const getExceptionString = (errorType: string, errorMessage: string, errorStackTrace: string[]) => {
    return wrap("pre", wrap("code", joinLine(
        "An unexpected error has occurred while running Scenamatica daemon:",
        `${errorType}: ${errorMessage}`,
        ...errorStackTrace.map((s) => ` at ${s}`)
    )
    ))
}
// Bug-report call-to-action: a prefilled GitHub issue link followed by the
// collapsible environment information block.
export const getReportingMessage = () => {
    return joinLine(
        wrap("h2", "Reporting bugs"),
        wrap("p", combine(
            "If you believe this is a bug, please report it to ",
            wrap("a", "Scenamatica", { href: REPORT_URL }),
            " along with the contents of this error message, the above stack trace, and the environment information listed below."
        )),
        getEnvInfoMessage()
    )
}
// Report footer: a horizontal rule followed by the license notice.
export const getFooter = () => {
    return joinLine(
        "<hr />",
        getLicenseMessage()
    )
}
// Collapsible block listing the tool versions and runner environment the
// action ran with; included in the bug-reporting instructions.
const getEnvInfoMessage = () => {
    const runArgs = getArguments()
    const envInfo = [
        "+ Versions:",
        ` - Scenamatica: ${runArgs.scenamaticaVersion}`,
        ` - Minecraft: ${runArgs.mcVersion}`,
        ` - Java: ${runArgs.javaVersion}`,
        ` - Node.js: ${process.version}`,
        "+ Runner:",
        ` - OS: ${process.platform}`,
        ` - Arch: ${process.arch}`,
    ]
    return wrap("details", joinLine(
        wrap("summary", "Environment Information"),
        wrap("pre", wrap("code", envInfo.join("\n")))
    ))
}
// License notice appended to every generated report.
const getLicenseMessage = () => {
    return joinLine(
        wrap("h2" , "License"),
        wrap("small", `This test report has been generated by ${
            wrap("a", "Scenamatica", { href: "https://github.com/TeamKUN/Scenamatica" })
        } and licensed under ${
            wrap("a", "MIT License", { href: "https://github.com/TeamKUN/Scenamatica/blob/main/LICENSE" })
        }.`),
        "<br />",
        wrap("small", "You can redistribute it and/or modify it under the terms of the MIT License.")
    )
}
/**
 * Wraps `text` in an HTML element, optionally with attributes.
 * e.g. wrap("a", "link", { href: "x" }) -> `<a href="x">link</a>`.
 */
const wrap = (tag: string, text: string, props: { [key: string]: string } = {}) => {
    const attributes = Object.entries(props).map(([key, value]) => `${key}="${value}"`).join(" ")
    // Only emit the separating space when attributes exist; the old code
    // produced tags like `<h1 >` for the (common) attribute-less case.
    const open = attributes ? `${tag} ${attributes}` : tag
    return `<${open}>${text}</${tag}>`
}
// Concatenates the given fragments with newline separators.
const joinLine = (...texts: string[]) => texts.join("\n")
// Concatenates the given fragments using the caller-supplied delimiter.
const join = (delimiter: string, ...texts: string[]) => texts.join(delimiter)
// Concatenates the given fragments with no separator.
const combine = (...texts: string[]) => texts.join("")
| src/outputs/messages.ts | TeamKun-scenamatica-action-6f66283 | [
{
"filename": "src/logging.ts",
"retrieved_chunk": " }\n}\nexport const logSessionStart = (startedAt: number, tests: number): void => {\n info(\"--------------------------------------\")\n info(\" T E S T S\")\n info(\"--------------------------------------\")\n info(`The session is started at ${startedAt}, ${tests} tests are marked to be run.`)\n}\nexport const logSessionEnd = (sessionEnd: PacketSessionEnd): void => {\n const elapsed = `${Math.ceil((sessionEnd.finishedAt - sessionEnd.startedAt) / 1000)} sec`",
"score": 0.8378268480300903
},
{
"filename": "src/outputs/pull-request/appender.ts",
"retrieved_chunk": "}\nexport const reportSessionEnd = (packet: PacketSessionEnd) => {\n const {results, finishedAt, startedAt} = packet\n appendHeaderIfNotPrinted()\n outMessage += `${getTestSummary(results, startedAt, finishedAt)}\n ${getTestResultTable(results, true)}\n `\n}\nconst appendHeaderIfNotPrinted = () => {\n if (!headerPrinted) {",
"score": 0.8189873695373535
},
{
"filename": "src/logging.ts",
"retrieved_chunk": " warning(`${emoji} The test ${name} is failed with state ${state} in ${elapsed}.`)\n break\n }\n }\n endGroup()\n}\nexport const getEmojiForCause = (cause: TestResultCause): string => {\n switch (cause) {\n case TestResultCause.PASSED: {\n return \"✔\"",
"score": 0.8075639009475708
},
{
"filename": "src/utils.ts",
"retrieved_chunk": " passed,\n failures,\n skipped,\n cancelled,\n }\n}\nexport const isTestSucceed = (results: PacketTestEnd[]) => {\n const {failures} = extractTestResults(results)\n const threshold = getArguments().failThreshold\n return failures <= threshold",
"score": 0.7940621972084045
},
{
"filename": "src/outputs/summary.ts",
"retrieved_chunk": "const printSummary = async (sessionEnd: PacketSessionEnd) => {\n const {results, finishedAt, startedAt} = sessionEnd\n summary.addRaw(getHeader(false))\n summary.addRaw(getTestSummary(results, startedAt, finishedAt))\n summary.addRaw(getTestResultTable(results))\n await summary.write()\n}\nlet errorHeaderPrinted = false\nlet errorReportingMessagePrinted = false\nconst printErrorSummary = async (errorType: string, errorMessage: string, errorStackTrace: string[]) => {",
"score": 0.780617356300354
}
] | typescript | export const getTestSummary = (results: PacketTestEnd[], startedAt: number, finishedAt: number) => { |
import {
DocumentDuplicateIcon as CopyIcon,
InformationCircleIcon as InfoIcon,
} from "@heroicons/react/24/outline";
import va from "@vercel/analytics";
import {
PropsWithChildren,
useCallback,
useEffect,
useMemo,
useState,
} from "react";
import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
import { Model } from "../data/modelMetadata";
import GitHubIcon from "./GitHubIcon";
export interface ModelCardProps {
visible: boolean;
onDismiss: () => void;
model: Model;
}
type Tabs = "python" | "js" | "curl";
export default function ModelCard(props: PropsWithChildren<ModelCardProps>) {
const { model, onDismiss, visible } = props;
const [activeTab, setActiveTab] = useState<Tabs>("python");
const selectTab = (tab: Tabs) => () => {
setActiveTab(tab);
};
const [style, setStyle] = useState({});
useEffect(() => {
import("react-syntax-highlighter/dist/esm/styles/prism/material-dark").then(
(mod) => setStyle(mod.default)
);
});
const modalClassName = [
"modal max-md:w-full max-md:modal-bottom",
visible ? "modal-open" : "",
];
const copyEndpoint = useCallback(() => {
navigator.clipboard.writeText(model.apiEndpoint);
}, [model.apiEndpoint]);
const selectOnClick = useCallback(
(event: React.MouseEvent<HTMLInputElement>) => {
event.currentTarget.select();
},
[]
);
const isTabSelected = useCallback(
(tab: Tabs) => {
return activeTab === tab ? "tab-active" : "";
},
[activeTab]
);
const code = useMemo(() => {
switch (activeTab) {
case "python":
return model.pythonCode;
case "js":
return model.jsCode;
case "curl":
return model.curlCode;
}
}, [activeTab, model]);
return (
<dialog className={modalClassName.join(" ")}>
<div className="modal-box max-w-full w-2/4">
<div className="prose w-full max-w-full">
<h3>{model.name}</h3>
<div className="my-10">
<div className="form-control">
<label className="label">
<span className="label-text font-medium text-lg">
API Endpoint
</span>
</label>
<div className="join">
<input
className="input input-bordered w-full min-w-fit max-w-full join-item cursor-default"
onClick={selectOnClick}
readOnly
value={model.apiEndpoint}
/>
<button className="btn join-item" onClick={copyEndpoint}>
<CopyIcon className="w-5 h-5" />
</button>
</div>
</div>
<div className="rounded-md bg-base-200 border border-base-content/10 p-4 my-6">
<p className="text-lg font-bold space-x-2">
<InfoIcon className="stroke-info w-8 h-8 inline-block" />
<span className="text-info-content dark:text-info">
You can call this API right now!
</span>
</p>
<p>
You can use this model in your application through our API. All
you need to do is to sign in and get a token.
</p>
<p>
<a href="https://youtu.be/jV6cP0PyRY0">
Watch this tutorial to help you get started!
</a>
</p>
<div className="text-center">
<a
className="btn btn-outline btn-active"
href="https://serverless.fal.ai/api/auth/login"
target="_blank"
onClick={() => {
va.track("github-login");
}}
>
| <GitHubIcon />{" "} |
<span className="ms-3">
{" "}
Sign in with Github to get a token{" "}
</span>
</a>
</div>
</div>
</div>
</div>
<div>
<div className="tabs w-full text-lg">
<a
className={`tab tab-lifted ${isTabSelected("python")}`}
onClick={selectTab("python")}
>
Python
</a>
<a
className={`tab tab-lifted ${isTabSelected("js")}`}
onClick={selectTab("js")}
>
JavaScript
</a>
<a
className={`tab tab-lifted ${isTabSelected("curl")}`}
onClick={selectTab("curl")}
>
cURL
</a>
</div>
<SyntaxHighlighter
text={code.trim()}
language={activeTab}
style={style}
>
{code.trim()}
</SyntaxHighlighter>
</div>
<div className="modal-action">
<button className="btn btn-outline" onClick={onDismiss}>
Done
</button>
</div>
</div>
<form method="dialog" className="modal-backdrop bg-black bg-opacity-50">
<button onClick={onDismiss}>close</button>
</form>
</dialog>
);
}
| src/components/ModelCard.tsx | fal-ai-edit-anything-app-4e32d65 | [
{
"filename": "src/pages/_navbar.tsx",
"retrieved_chunk": " >\n fal-serverless\n </a>\n </span>\n </div>\n <div className=\"flex\">\n <a\n href=\"https://github.com/fal-ai/edit-anything-app\"\n target=\"_blank\"\n className=\"opacity-40 hover:opacity-70 dark:opacity-60 dark:hover:opacity-90 transition-opacity duration-200 pe-2 md:pe-0\"",
"score": 0.7753218412399292
},
{
"filename": "src/pages/_footer.tsx",
"retrieved_chunk": " </p>\n </div>\n <div className=\"prose flex\">\n <a\n href=\"https://twitter.com/fal_ai_data\"\n className=\"opacity-40 hover:opacity-70\"\n target=\"_blank\"\n >\n <svg\n xmlns=\"http://www.w3.org/2000/svg\"",
"score": 0.7721842527389526
},
{
"filename": "src/pages/_footer.tsx",
"retrieved_chunk": "import GitHubIcon from \"@/components/GitHubIcon\";\nexport default function Footer() {\n return (\n <footer className=\"footer footer-center p-4 container mx-auto gap-2 md:gap-4\">\n <div className=\"prose\">\n <p>\n Copyright © 2023 - All right reserved -{\" \"}\n <a href=\"https://fal.ai\" className=\"link\" target=\"_blank\">\n fal.ai\n </a>",
"score": 0.7645242214202881
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " onSelect={handleModelSelected}\n selectedModel={selectedModel}\n />\n </div>\n <div className=\"hidden md:flex items-end justify-end\">\n <button\n className=\"btn btn-outline\"\n onClick={() => setShowModelDetails(true)}\n >\n <CodeBracketIcon className=\"h-6 w-6\" />",
"score": 0.7634484767913818
},
{
"filename": "src/components/StableDiffusion.tsx",
"retrieved_chunk": " name=\"prompt\"\n value={prompt}\n onChange={(e) => setPrompt(e.target.value)}\n placeholder=\"something creative, like 'a bus on the moon'\"\n className=\"input placeholder-gray-400 dark:placeholder-gray-600 w-full\"\n disabled={isLoading}\n />\n </label>\n </div>\n <button",
"score": 0.7593557834625244
}
] | typescript | <GitHubIcon />{" "} |
import NextImage from "next/image";
import Card from "./Card";
import EmptyMessage from "./EmptyMessage";
// Props for the replace/remove/fill mode-switcher button group.
interface StableDiffusionButtonGroupProps {
  setActiveTab: (tab: string) => void;
  activeTab: string;
}
/**
 * Segmented button group for choosing the Stable Diffusion edit mode
 * ("replace" | "remove" | "fill"). The active mode's button is highlighted.
 */
export const StableDiffusionOptionsButtonGroup = (
  props: StableDiffusionButtonGroupProps
) => {
  const { setActiveTab, activeTab } = props;
  // Use the destructured prop consistently (the original mixed `activeTab`
  // and `props.activeTab`); behavior is identical.
  const tabClass = (tabName: string) =>
    activeTab === tabName ? "btn-primary" : "";
  return (
    <div className="max-md:px-2 flex container mx-auto pt-8 w-full">
      <div className="join">
        <button
          onClick={() => setActiveTab("replace")}
          className={`btn ${tabClass("replace")} join-item`}
        >
          Replace
        </button>
        <button
          onClick={() => setActiveTab("remove")}
          className={`btn ${tabClass("remove")} join-item`}
        >
          Remove
        </button>
        <button
          onClick={() => setActiveTab("fill")}
          className={`btn ${tabClass("fill")} join-item`}
        >
          Fill
        </button>
      </div>
    </div>
  );
};
// Props for the tabbed replace/remove/fill editing panel.
// `hasPrompt`/`hasFillPrompt` are truthy when the matching text input is
// non-empty; the *ImageUrls arrays hold results generated so far.
interface StableDiffusionInputProps {
  setActiveTab: (tab: string) => void;
  activeTab: string;
  setPrompt: (prompt: string) => void;
  setFillPrompt: (prompt: string) => void;
  prompt: string;
  fillPrompt: string;
  isLoading: boolean;
  selectedMask: string | null;
  hasPrompt: boolean | string;
  hasFillPrompt: boolean | string;
  handleReplace: () => void;
  handleRemove: () => void;
  handleFill: () => void;
  replacedImageUrls: string[];
  removedImageUrls: string[];
  filledImageUrls: string[];
}
export const StableDiffusionInput = (props: StableDiffusionInputProps) => {
const {
activeTab,
setActiveTab,
setPrompt,
prompt,
fillPrompt,
hasFillPrompt,
isLoading,
handleReplace,
handleRemove,
handleFill,
setFillPrompt,
selectedMask,
hasPrompt,
replacedImageUrls,
removedImageUrls,
filledImageUrls,
} = props;
return (
<div>
<StableDiffusionOptionsButtonGroup
activeTab={activeTab}
setActiveTab={setActiveTab}
/>
{activeTab === "replace" && (
<div className="container mx-auto pt-8 w-full">
<Card title="Replace...">
<div className="flex flex-col md:flex-row md:space-x-6">
<div className="form-control w-full md:w-3/5 max-w-full">
<label>
<input
id="prompt_input"
type="text"
name="prompt"
value={prompt}
onChange={(e) => setPrompt(e.target.value)}
placeholder="something creative, like 'a bus on the moon'"
className="input placeholder-gray-400 dark:placeholder-gray-600 w-full"
disabled={isLoading}
/>
</label>
</div>
<button
className="btn btn-primary max-sm:btn-wide mt-4 mx-auto md:mx-0 md:mt-0"
disabled={isLoading || !selectedMask || !hasPrompt}
onClick={handleReplace}
>
{selectedMask ? "Generate" : "Pick one of the mask options"}
</button>
</div>
{replacedImageUrls.length === 0 && (
<div className="my-12">
| <EmptyMessage message="Nothing to see just yet" />
</div>
)} |
<div className="grid grid-cols-1 gap-4 mt-4 md:mt-6 lg:p-12 mx-auto">
{replacedImageUrls.map((url, index) => (
<NextImage
key={index}
src={url}
alt={`Generated Image ${index + 1}`}
width={0}
height={0}
sizes="100vw"
style={{ width: "100%", height: "auto" }}
className="my-0"
/>
))}
</div>
</Card>
</div>
)}
{activeTab === "remove" && (
<div className="container mx-auto pt-8 w-full">
<Card title="Remove...">
<div className="flex flex-col md:flex-row md:space-x-6">
<button
className="btn btn-primary max-sm:btn-wide mt-4 mx-auto md:mx-0 md:mt-0"
disabled={isLoading || !selectedMask}
onClick={handleRemove}
>
{selectedMask ? "Remove" : "Pick one of the mask options"}
</button>
</div>
{removedImageUrls.length === 0 && (
<div className="my-12">
<EmptyMessage message="Nothing to see just yet" />
</div>
)}
<div className="grid grid-cols-1 gap-4 mt-4 md:mt-6 lg:p-12 mx-auto">
{removedImageUrls.map((url, index) => (
<NextImage
key={index}
src={url}
alt={`Generated Image ${index + 1}`}
width={0}
height={0}
sizes="100vw"
style={{ width: "100%", height: "auto" }}
className="my-0"
/>
))}
</div>
</Card>
</div>
)}
{activeTab === "fill" && (
<div className="container mx-auto pt-8 w-full">
<Card title="Fill...">
<div className="flex flex-col md:flex-row md:space-x-6">
<div className="form-control w-full md:w-3/5 max-w-full">
<label>
<input
id="fill_prompt_input"
type="text"
name="fill_prompt"
value={fillPrompt}
onChange={(e) => setFillPrompt(e.target.value)}
placeholder="something creative, like 'an alien'"
className="input placeholder-gray-400 dark:placeholder-gray-600 w-full"
disabled={isLoading}
/>
</label>
</div>
<button
className="btn btn-primary max-sm:btn-wide mt-4 mx-auto md:mx-0 md:mt-0"
disabled={isLoading || !selectedMask || !hasFillPrompt}
onClick={handleFill}
>
{selectedMask ? "Fill" : "Pick one of the mask options"}
</button>
</div>
{filledImageUrls.length === 0 && (
<div className="my-12">
<EmptyMessage message="Nothing to see just yet" />
</div>
)}
<div className="grid grid-cols-1 gap-4 mt-4 md:mt-6 lg:p-12 mx-auto">
{filledImageUrls.map((url, index) => (
<NextImage
key={index}
src={url}
alt={`Generated Image ${index + 1}`}
width={0}
height={0}
sizes="100vw"
style={{ width: "100%", height: "auto" }}
className="my-0"
/>
))}
</div>
</Card>
</div>
)}
</div>
);
};
| src/components/StableDiffusion.tsx | fal-ai-edit-anything-app-4e32d65 | [
{
"filename": "src/components/MaskPicker.tsx",
"retrieved_chunk": " <button\n className=\"btn btn-primary max-sm:btn-wide mb-4 md:mb-0\"\n disabled={isLoading || !selectedImage || !position}\n onClick={generateMasks}\n >\n {position ? \"Regenerate\" : \"Set the mask reference point\"}\n </button>\n </>\n )}\n </Card>",
"score": 0.811394453048706
},
{
"filename": "src/components/ModelCard.tsx",
"retrieved_chunk": " >\n <GitHubIcon />{\" \"}\n <span className=\"ms-3\">\n {\" \"}\n Sign in with Github to get a token{\" \"}\n </span>\n </a>\n </div>\n </div>\n </div>",
"score": 0.8094015717506409
},
{
"filename": "src/components/ImageCountDisplay.tsx",
"retrieved_chunk": " A total of{\" \"}\n <strong>\n <CountUp start={props.count - 5} end={props.count} /> images{\" \"}\n </strong>\n created, and counting!\n </>\n ) : (\n \"\"\n )}\n </p>",
"score": 0.8033944368362427
},
{
"filename": "src/components/MaskPicker.tsx",
"retrieved_chunk": " {displayMasks.map((mask, index) => (\n <ImageMask\n key={index}\n alt={`Mask ${index}`}\n mask={mask}\n selected={selectedMask === mask}\n onClick={handleMaskSelected}\n />\n ))}\n </div>",
"score": 0.8011621236801147
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " <ImageSelector\n onImageSelect={handleImageSelected}\n disabled={isLoading}\n />\n )}\n {selectedImage && (\n <>\n <div className=\"flex justify-between\">\n {selectedModel.id === \"sam\" && (\n <span className=\"font-light mb-0 inline-block opacity-70\">",
"score": 0.7806069254875183
}
] | typescript | <EmptyMessage message="Nothing to see just yet" />
</div>
)} |
import {
DocumentDuplicateIcon as CopyIcon,
InformationCircleIcon as InfoIcon,
} from "@heroicons/react/24/outline";
import va from "@vercel/analytics";
import {
PropsWithChildren,
useCallback,
useEffect,
useMemo,
useState,
} from "react";
import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
import { Model } from "../data/modelMetadata";
import GitHubIcon from "./GitHubIcon";
export interface ModelCardProps {
visible: boolean;
onDismiss: () => void;
model: Model;
}
type Tabs = "python" | "js" | "curl";
export default function ModelCard(props: PropsWithChildren<ModelCardProps>) {
const { model, onDismiss, visible } = props;
const [activeTab, setActiveTab] = useState<Tabs>("python");
const selectTab = (tab: Tabs) => () => {
setActiveTab(tab);
};
const [style, setStyle] = useState({});
useEffect(() => {
import("react-syntax-highlighter/dist/esm/styles/prism/material-dark").then(
(mod) => setStyle(mod.default)
);
});
const modalClassName = [
"modal max-md:w-full max-md:modal-bottom",
visible ? "modal-open" : "",
];
const copyEndpoint = useCallback(() => {
navigator.clipboard.writeText(model.apiEndpoint);
}, [model.apiEndpoint]);
const selectOnClick = useCallback(
(event: React.MouseEvent<HTMLInputElement>) => {
event.currentTarget.select();
},
[]
);
const isTabSelected = useCallback(
(tab: Tabs) => {
return activeTab === tab ? "tab-active" : "";
},
[activeTab]
);
const code = useMemo(() => {
switch (activeTab) {
case "python":
| return model.pythonCode; |
case "js":
return model.jsCode;
case "curl":
return model.curlCode;
}
}, [activeTab, model]);
return (
<dialog className={modalClassName.join(" ")}>
<div className="modal-box max-w-full w-2/4">
<div className="prose w-full max-w-full">
<h3>{model.name}</h3>
<div className="my-10">
<div className="form-control">
<label className="label">
<span className="label-text font-medium text-lg">
API Endpoint
</span>
</label>
<div className="join">
<input
className="input input-bordered w-full min-w-fit max-w-full join-item cursor-default"
onClick={selectOnClick}
readOnly
value={model.apiEndpoint}
/>
<button className="btn join-item" onClick={copyEndpoint}>
<CopyIcon className="w-5 h-5" />
</button>
</div>
</div>
<div className="rounded-md bg-base-200 border border-base-content/10 p-4 my-6">
<p className="text-lg font-bold space-x-2">
<InfoIcon className="stroke-info w-8 h-8 inline-block" />
<span className="text-info-content dark:text-info">
You can call this API right now!
</span>
</p>
<p>
You can use this model in your application through our API. All
you need to do is to sign in and get a token.
</p>
<p>
<a href="https://youtu.be/jV6cP0PyRY0">
Watch this tutorial to help you get started!
</a>
</p>
<div className="text-center">
<a
className="btn btn-outline btn-active"
href="https://serverless.fal.ai/api/auth/login"
target="_blank"
onClick={() => {
va.track("github-login");
}}
>
<GitHubIcon />{" "}
<span className="ms-3">
{" "}
Sign in with Github to get a token{" "}
</span>
</a>
</div>
</div>
</div>
</div>
<div>
<div className="tabs w-full text-lg">
<a
className={`tab tab-lifted ${isTabSelected("python")}`}
onClick={selectTab("python")}
>
Python
</a>
<a
className={`tab tab-lifted ${isTabSelected("js")}`}
onClick={selectTab("js")}
>
JavaScript
</a>
<a
className={`tab tab-lifted ${isTabSelected("curl")}`}
onClick={selectTab("curl")}
>
cURL
</a>
</div>
<SyntaxHighlighter
text={code.trim()}
language={activeTab}
style={style}
>
{code.trim()}
</SyntaxHighlighter>
</div>
<div className="modal-action">
<button className="btn btn-outline" onClick={onDismiss}>
Done
</button>
</div>
</div>
<form method="dialog" className="modal-backdrop bg-black bg-opacity-50">
<button onClick={onDismiss}>close</button>
</form>
</dialog>
);
}
| src/components/ModelCard.tsx | fal-ai-edit-anything-app-4e32d65 | [
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " setStep(StepName.ChooseMask);\n } catch (e: any) {\n setError({ message: \"Failed to generate masks\", details: e.message });\n } finally {\n setLoading(false);\n }\n };\n const handleMaskSelected = (mask: string) => {\n // TODO: find mask index in a better way\n const index = displayMasks.indexOf(mask);",
"score": 0.833893895149231
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " };\n const generateMasks = async () => {\n setLoading(true);\n try {\n if (!selectedImage || !position) {\n setError({\n message: \"You must add an image and select a mask position\",\n });\n return;\n }",
"score": 0.8013579845428467
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " setImageUrls(images);\n setStep(StepName.Generate);\n } catch (e: any) {\n setError({ message: \"Failed to generate images\", details: e.message });\n } finally {\n setLoading(false);\n }\n };\n const handleRemove = async () => {\n if (selectedImage && selectedMask) {",
"score": 0.7970765829086304
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " mask_url: selectedMask,\n image_url: selectedImage,\n prompt,\n };\n await handleAction(\"/api/edit\", body, setReplacedImageUrls);\n }\n };\n const handleFill = async () => {\n if (selectedImage && selectedMask) {\n const body = {",
"score": 0.792816162109375
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " setSelectedDisplayMask(mask);\n setSelectedMask(masks[index]);\n setStep(StepName.DefinePrompt);\n };\n const validateInputs = (): string | null => {\n if (!position || !selectedMask) {\n return \"You must add an image and select a mask before.\";\n }\n return null;\n };",
"score": 0.7881511449813843
}
] | typescript | return model.pythonCode; |
import { ImageFile } from "@/data/image";
import { Model } from "@/data/modelMetadata";
import { PropsWithChildren } from "react";
import Card from "./Card";
import EmptyMessage from "./EmptyMessage";
import ImageMask from "./ImageMask";
export interface MaskPickerProps {
displayMasks: string[];
masks: string[];
dilation: number;
isLoading: boolean;
setDilation: (dilation: number) => void;
selectedImage: ImageFile | null;
position: { x: number; y: number } | null;
generateMasks: () => void;
selectedMask: string | null;
handleMaskSelected: (mask: string) => void;
selectedModel: Model;
}
export default function MaskPicker(props: PropsWithChildren<MaskPickerProps>) {
const {
displayMasks,
masks,
dilation,
isLoading,
setDilation,
selectedImage,
position,
generateMasks,
selectedMask,
handleMaskSelected,
} = props;
return (
<Card title="Masks" classNames="min-h-full">
<label>
Dilation:
<input
id="mask_dilation"
type="number"
name="dilation"
value={dilation}
onChange={(e) => setDilation(parseInt(e.target.value))} // @ts-nocheck
className="input placeholder-gray-400 dark:placeholder-gray-600 w-full"
disabled={isLoading}
/>
</label>
{displayMasks.length === 0 && (
<div className="items-center mt-0 md:mt-12">
<div className="hidden md:display">
<EmptyMessage message="No masks generated yet" />
</div>
<div className="flex flex-col items-center">
<button
className="btn btn-primary max-sm:btn-wide mb-4 md:mb-0"
disabled={isLoading || !selectedImage || !position}
onClick={generateMasks}
>
{position ? "Generate masks" : "Set the mask reference point"}
</button>
</div>
</div>
)}
{displayMasks.length > 0 && (
<>
{props.selectedModel.id === "sam" && (
<span className="font-light mb-0 inline-block opacity-70">
<strong>Hint:</strong> click on the image select a mask
</span>
)}
<div className="grid grid-cols-1 space-y-2">
{displayMasks.map((mask, index) => (
< | ImageMask
key={index} |
alt={`Mask ${index}`}
mask={mask}
selected={selectedMask === mask}
onClick={handleMaskSelected}
/>
))}
</div>
<button
className="btn btn-primary max-sm:btn-wide mb-4 md:mb-0"
disabled={isLoading || !selectedImage || !position}
onClick={generateMasks}
>
{position ? "Regenerate" : "Set the mask reference point"}
</button>
</>
)}
</Card>
);
}
| src/components/MaskPicker.tsx | fal-ai-edit-anything-app-4e32d65 | [
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " <ImageSelector\n onImageSelect={handleImageSelected}\n disabled={isLoading}\n />\n )}\n {selectedImage && (\n <>\n <div className=\"flex justify-between\">\n {selectedModel.id === \"sam\" && (\n <span className=\"font-light mb-0 inline-block opacity-70\">",
"score": 0.881511926651001
},
{
"filename": "src/components/StableDiffusion.tsx",
"retrieved_chunk": " </div>\n {removedImageUrls.length === 0 && (\n <div className=\"my-12\">\n <EmptyMessage message=\"Nothing to see just yet\" />\n </div>\n )}\n <div className=\"grid grid-cols-1 gap-4 mt-4 md:mt-6 lg:p-12 mx-auto\">\n {removedImageUrls.map((url, index) => (\n <NextImage\n key={index}",
"score": 0.8813958764076233
},
{
"filename": "src/components/StableDiffusion.tsx",
"retrieved_chunk": " <div className=\"my-12\">\n <EmptyMessage message=\"Nothing to see just yet\" />\n </div>\n )}\n <div className=\"grid grid-cols-1 gap-4 mt-4 md:mt-6 lg:p-12 mx-auto\">\n {filledImageUrls.map((url, index) => (\n <NextImage\n key={index}\n src={url}\n alt={`Generated Image ${index + 1}`}",
"score": 0.8804473280906677
},
{
"filename": "src/components/StableDiffusion.tsx",
"retrieved_chunk": " </div>\n <button\n className=\"btn btn-primary max-sm:btn-wide mt-4 mx-auto md:mx-0 md:mt-0\"\n disabled={isLoading || !selectedMask || !hasFillPrompt}\n onClick={handleFill}\n >\n {selectedMask ? \"Fill\" : \"Pick one of the mask options\"}\n </button>\n </div>\n {filledImageUrls.length === 0 && (",
"score": 0.8574983477592468
},
{
"filename": "src/components/StableDiffusion.tsx",
"retrieved_chunk": " </div>\n )}\n <div className=\"grid grid-cols-1 gap-4 mt-4 md:mt-6 lg:p-12 mx-auto\">\n {replacedImageUrls.map((url, index) => (\n <NextImage\n key={index}\n src={url}\n alt={`Generated Image ${index + 1}`}\n width={0}\n height={0}",
"score": 0.8567051887512207
}
] | typescript | ImageMask
key={index} |
import {
DocumentDuplicateIcon as CopyIcon,
InformationCircleIcon as InfoIcon,
} from "@heroicons/react/24/outline";
import va from "@vercel/analytics";
import {
PropsWithChildren,
useCallback,
useEffect,
useMemo,
useState,
} from "react";
import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
import { Model } from "../data/modelMetadata";
import GitHubIcon from "./GitHubIcon";
export interface ModelCardProps {
visible: boolean;
onDismiss: () => void;
model: Model;
}
type Tabs = "python" | "js" | "curl";
export default function ModelCard(props: PropsWithChildren<ModelCardProps>) {
const { model, onDismiss, visible } = props;
const [activeTab, setActiveTab] = useState<Tabs>("python");
const selectTab = (tab: Tabs) => () => {
setActiveTab(tab);
};
const [style, setStyle] = useState({});
useEffect(() => {
import("react-syntax-highlighter/dist/esm/styles/prism/material-dark").then(
(mod) => setStyle(mod.default)
);
});
const modalClassName = [
"modal max-md:w-full max-md:modal-bottom",
visible ? "modal-open" : "",
];
const copyEndpoint = useCallback(() => {
navigator.clipboard.writeText(model.apiEndpoint);
}, [model.apiEndpoint]);
const selectOnClick = useCallback(
(event: React.MouseEvent<HTMLInputElement>) => {
event.currentTarget.select();
},
[]
);
const isTabSelected = useCallback(
(tab: Tabs) => {
return activeTab === tab ? "tab-active" : "";
},
[activeTab]
);
const code = useMemo(() => {
switch (activeTab) {
case "python":
return model.pythonCode;
case "js":
return model.jsCode;
case "curl":
return model.curlCode;
}
}, [activeTab, model]);
return (
<dialog className={modalClassName.join(" ")}>
<div className="modal-box max-w-full w-2/4">
<div className="prose w-full max-w-full">
<h3>{model.name}</h3>
<div className="my-10">
<div className="form-control">
<label className="label">
<span className="label-text font-medium text-lg">
API Endpoint
</span>
</label>
<div className="join">
<input
className="input input-bordered w-full min-w-fit max-w-full join-item cursor-default"
onClick={selectOnClick}
readOnly
value={model.apiEndpoint}
/>
<button className="btn join-item" onClick={copyEndpoint}>
<CopyIcon className="w-5 h-5" />
</button>
</div>
</div>
<div className="rounded-md bg-base-200 border border-base-content/10 p-4 my-6">
<p className="text-lg font-bold space-x-2">
<InfoIcon className="stroke-info w-8 h-8 inline-block" />
<span className="text-info-content dark:text-info">
You can call this API right now!
</span>
</p>
<p>
You can use this model in your application through our API. All
you need to do is to sign in and get a token.
</p>
<p>
<a href="https://youtu.be/jV6cP0PyRY0">
Watch this tutorial to help you get started!
</a>
</p>
<div className="text-center">
<a
className="btn btn-outline btn-active"
href="https://serverless.fal.ai/api/auth/login"
target="_blank"
onClick={() => {
va.track("github-login");
}}
>
< | GitHubIcon />{" "} |
<span className="ms-3">
{" "}
Sign in with Github to get a token{" "}
</span>
</a>
</div>
</div>
</div>
</div>
<div>
<div className="tabs w-full text-lg">
<a
className={`tab tab-lifted ${isTabSelected("python")}`}
onClick={selectTab("python")}
>
Python
</a>
<a
className={`tab tab-lifted ${isTabSelected("js")}`}
onClick={selectTab("js")}
>
JavaScript
</a>
<a
className={`tab tab-lifted ${isTabSelected("curl")}`}
onClick={selectTab("curl")}
>
cURL
</a>
</div>
<SyntaxHighlighter
text={code.trim()}
language={activeTab}
style={style}
>
{code.trim()}
</SyntaxHighlighter>
</div>
<div className="modal-action">
<button className="btn btn-outline" onClick={onDismiss}>
Done
</button>
</div>
</div>
<form method="dialog" className="modal-backdrop bg-black bg-opacity-50">
<button onClick={onDismiss}>close</button>
</form>
</dialog>
);
}
| src/components/ModelCard.tsx | fal-ai-edit-anything-app-4e32d65 | [
{
"filename": "src/components/StableDiffusion.tsx",
"retrieved_chunk": " name=\"prompt\"\n value={prompt}\n onChange={(e) => setPrompt(e.target.value)}\n placeholder=\"something creative, like 'a bus on the moon'\"\n className=\"input placeholder-gray-400 dark:placeholder-gray-600 w-full\"\n disabled={isLoading}\n />\n </label>\n </div>\n <button",
"score": 0.7630574703216553
},
{
"filename": "src/pages/_navbar.tsx",
"retrieved_chunk": " >\n fal-serverless\n </a>\n </span>\n </div>\n <div className=\"flex\">\n <a\n href=\"https://github.com/fal-ai/edit-anything-app\"\n target=\"_blank\"\n className=\"opacity-40 hover:opacity-70 dark:opacity-60 dark:hover:opacity-90 transition-opacity duration-200 pe-2 md:pe-0\"",
"score": 0.7531973123550415
},
{
"filename": "src/pages/_footer.tsx",
"retrieved_chunk": "import GitHubIcon from \"@/components/GitHubIcon\";\nexport default function Footer() {\n return (\n <footer className=\"footer footer-center p-4 container mx-auto gap-2 md:gap-4\">\n <div className=\"prose\">\n <p>\n Copyright © 2023 - All right reserved -{\" \"}\n <a href=\"https://fal.ai\" className=\"link\" target=\"_blank\">\n fal.ai\n </a>",
"score": 0.7467864155769348
},
{
"filename": "src/components/StableDiffusion.tsx",
"retrieved_chunk": " id=\"fill_prompt_input\"\n type=\"text\"\n name=\"fill_prompt\"\n value={fillPrompt}\n onChange={(e) => setFillPrompt(e.target.value)}\n placeholder=\"something creative, like 'an alien'\"\n className=\"input placeholder-gray-400 dark:placeholder-gray-600 w-full\"\n disabled={isLoading}\n />\n </label>",
"score": 0.7430347204208374
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " onSelect={handleModelSelected}\n selectedModel={selectedModel}\n />\n </div>\n <div className=\"hidden md:flex items-end justify-end\">\n <button\n className=\"btn btn-outline\"\n onClick={() => setShowModelDetails(true)}\n >\n <CodeBracketIcon className=\"h-6 w-6\" />",
"score": 0.7414860725402832
}
] | typescript | GitHubIcon />{" "} |
import {
DocumentDuplicateIcon as CopyIcon,
InformationCircleIcon as InfoIcon,
} from "@heroicons/react/24/outline";
import va from "@vercel/analytics";
import {
PropsWithChildren,
useCallback,
useEffect,
useMemo,
useState,
} from "react";
import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
import { Model } from "../data/modelMetadata";
import GitHubIcon from "./GitHubIcon";
export interface ModelCardProps {
visible: boolean;
onDismiss: () => void;
model: Model;
}
type Tabs = "python" | "js" | "curl";
export default function ModelCard(props: PropsWithChildren<ModelCardProps>) {
const { model, onDismiss, visible } = props;
const [activeTab, setActiveTab] = useState<Tabs>("python");
const selectTab = (tab: Tabs) => () => {
setActiveTab(tab);
};
const [style, setStyle] = useState({});
useEffect(() => {
import("react-syntax-highlighter/dist/esm/styles/prism/material-dark").then(
(mod) => setStyle(mod.default)
);
});
const modalClassName = [
"modal max-md:w-full max-md:modal-bottom",
visible ? "modal-open" : "",
];
const copyEndpoint = useCallback(() => {
navigator.clipboard.writeText(model.apiEndpoint);
}, [model.apiEndpoint]);
const selectOnClick = useCallback(
(event: React.MouseEvent<HTMLInputElement>) => {
event.currentTarget.select();
},
[]
);
const isTabSelected = useCallback(
(tab: Tabs) => {
return activeTab === tab ? "tab-active" : "";
},
[activeTab]
);
const code = useMemo(() => {
switch (activeTab) {
case "python":
return model.pythonCode;
case "js":
return model.jsCode;
case "curl":
| return model.curlCode; |
}
}, [activeTab, model]);
return (
<dialog className={modalClassName.join(" ")}>
<div className="modal-box max-w-full w-2/4">
<div className="prose w-full max-w-full">
<h3>{model.name}</h3>
<div className="my-10">
<div className="form-control">
<label className="label">
<span className="label-text font-medium text-lg">
API Endpoint
</span>
</label>
<div className="join">
<input
className="input input-bordered w-full min-w-fit max-w-full join-item cursor-default"
onClick={selectOnClick}
readOnly
value={model.apiEndpoint}
/>
<button className="btn join-item" onClick={copyEndpoint}>
<CopyIcon className="w-5 h-5" />
</button>
</div>
</div>
<div className="rounded-md bg-base-200 border border-base-content/10 p-4 my-6">
<p className="text-lg font-bold space-x-2">
<InfoIcon className="stroke-info w-8 h-8 inline-block" />
<span className="text-info-content dark:text-info">
You can call this API right now!
</span>
</p>
<p>
You can use this model in your application through our API. All
you need to do is to sign in and get a token.
</p>
<p>
<a href="https://youtu.be/jV6cP0PyRY0">
Watch this tutorial to help you get started!
</a>
</p>
<div className="text-center">
<a
className="btn btn-outline btn-active"
href="https://serverless.fal.ai/api/auth/login"
target="_blank"
onClick={() => {
va.track("github-login");
}}
>
<GitHubIcon />{" "}
<span className="ms-3">
{" "}
Sign in with Github to get a token{" "}
</span>
</a>
</div>
</div>
</div>
</div>
<div>
<div className="tabs w-full text-lg">
<a
className={`tab tab-lifted ${isTabSelected("python")}`}
onClick={selectTab("python")}
>
Python
</a>
<a
className={`tab tab-lifted ${isTabSelected("js")}`}
onClick={selectTab("js")}
>
JavaScript
</a>
<a
className={`tab tab-lifted ${isTabSelected("curl")}`}
onClick={selectTab("curl")}
>
cURL
</a>
</div>
<SyntaxHighlighter
text={code.trim()}
language={activeTab}
style={style}
>
{code.trim()}
</SyntaxHighlighter>
</div>
<div className="modal-action">
<button className="btn btn-outline" onClick={onDismiss}>
Done
</button>
</div>
</div>
<form method="dialog" className="modal-backdrop bg-black bg-opacity-50">
<button onClick={onDismiss}>close</button>
</form>
</dialog>
);
}
| src/components/ModelCard.tsx | fal-ai-edit-anything-app-4e32d65 | [
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " setStep(StepName.ChooseMask);\n } catch (e: any) {\n setError({ message: \"Failed to generate masks\", details: e.message });\n } finally {\n setLoading(false);\n }\n };\n const handleMaskSelected = (mask: string) => {\n // TODO: find mask index in a better way\n const index = displayMasks.indexOf(mask);",
"score": 0.7640058994293213
},
{
"filename": "src/data/modelMetadata.ts",
"retrieved_chunk": "export type Model = {\n id: string;\n name: string;\n apiEndpoint: string;\n pythonCode: string;\n jsCode: string;\n curlCode: string;\n};\nconst regmbModel: Model = {\n id: \"rembg\",",
"score": 0.7584811449050903
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " setSelectedImage(null);\n };\n const handleScrible = async (data: string) => {\n const image: ImageFile = {\n data: data,\n filename: \"scribble.png\",\n size: { width: 512, height: 512 },\n };\n setSelectedImage(image);\n };",
"score": 0.756324291229248
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " };\n const generateMasks = async () => {\n setLoading(true);\n try {\n if (!selectedImage || !position) {\n setError({\n message: \"You must add an image and select a mask position\",\n });\n return;\n }",
"score": 0.7548484802246094
},
{
"filename": "src/util.ts",
"retrieved_chunk": "): Promise<Buffer | null> {\n try {\n const response = await fetch(imageUrl);\n if (!response.ok) {\n throw new Error(\"Failed to download the image\");\n }\n return await response.buffer();\n } catch (error) {\n console.error(\"Failed to convert image URL to file:\", error);\n return null;",
"score": 0.7526139616966248
}
] | typescript | return model.curlCode; |
import {
DocumentDuplicateIcon as CopyIcon,
InformationCircleIcon as InfoIcon,
} from "@heroicons/react/24/outline";
import va from "@vercel/analytics";
import {
PropsWithChildren,
useCallback,
useEffect,
useMemo,
useState,
} from "react";
import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
import { Model } from "../data/modelMetadata";
import GitHubIcon from "./GitHubIcon";
export interface ModelCardProps {
visible: boolean;
onDismiss: () => void;
model: Model;
}
type Tabs = "python" | "js" | "curl";
export default function ModelCard(props: PropsWithChildren<ModelCardProps>) {
const { model, onDismiss, visible } = props;
const [activeTab, setActiveTab] = useState<Tabs>("python");
const selectTab = (tab: Tabs) => () => {
setActiveTab(tab);
};
const [style, setStyle] = useState({});
useEffect(() => {
import("react-syntax-highlighter/dist/esm/styles/prism/material-dark").then(
(mod) => setStyle(mod.default)
);
});
const modalClassName = [
"modal max-md:w-full max-md:modal-bottom",
visible ? "modal-open" : "",
];
const copyEndpoint = useCallback(() => {
| navigator.clipboard.writeText(model.apiEndpoint); |
}, [model.apiEndpoint]);
const selectOnClick = useCallback(
(event: React.MouseEvent<HTMLInputElement>) => {
event.currentTarget.select();
},
[]
);
const isTabSelected = useCallback(
(tab: Tabs) => {
return activeTab === tab ? "tab-active" : "";
},
[activeTab]
);
const code = useMemo(() => {
switch (activeTab) {
case "python":
return model.pythonCode;
case "js":
return model.jsCode;
case "curl":
return model.curlCode;
}
}, [activeTab, model]);
return (
<dialog className={modalClassName.join(" ")}>
<div className="modal-box max-w-full w-2/4">
<div className="prose w-full max-w-full">
<h3>{model.name}</h3>
<div className="my-10">
<div className="form-control">
<label className="label">
<span className="label-text font-medium text-lg">
API Endpoint
</span>
</label>
<div className="join">
<input
className="input input-bordered w-full min-w-fit max-w-full join-item cursor-default"
onClick={selectOnClick}
readOnly
value={model.apiEndpoint}
/>
<button className="btn join-item" onClick={copyEndpoint}>
<CopyIcon className="w-5 h-5" />
</button>
</div>
</div>
<div className="rounded-md bg-base-200 border border-base-content/10 p-4 my-6">
<p className="text-lg font-bold space-x-2">
<InfoIcon className="stroke-info w-8 h-8 inline-block" />
<span className="text-info-content dark:text-info">
You can call this API right now!
</span>
</p>
<p>
You can use this model in your application through our API. All
you need to do is to sign in and get a token.
</p>
<p>
<a href="https://youtu.be/jV6cP0PyRY0">
Watch this tutorial to help you get started!
</a>
</p>
<div className="text-center">
<a
className="btn btn-outline btn-active"
href="https://serverless.fal.ai/api/auth/login"
target="_blank"
onClick={() => {
va.track("github-login");
}}
>
<GitHubIcon />{" "}
<span className="ms-3">
{" "}
Sign in with Github to get a token{" "}
</span>
</a>
</div>
</div>
</div>
</div>
<div>
<div className="tabs w-full text-lg">
<a
className={`tab tab-lifted ${isTabSelected("python")}`}
onClick={selectTab("python")}
>
Python
</a>
<a
className={`tab tab-lifted ${isTabSelected("js")}`}
onClick={selectTab("js")}
>
JavaScript
</a>
<a
className={`tab tab-lifted ${isTabSelected("curl")}`}
onClick={selectTab("curl")}
>
cURL
</a>
</div>
<SyntaxHighlighter
text={code.trim()}
language={activeTab}
style={style}
>
{code.trim()}
</SyntaxHighlighter>
</div>
<div className="modal-action">
<button className="btn btn-outline" onClick={onDismiss}>
Done
</button>
</div>
</div>
<form method="dialog" className="modal-backdrop bg-black bg-opacity-50">
<button onClick={onDismiss}>close</button>
</form>
</dialog>
);
}
| src/components/ModelCard.tsx | fal-ai-edit-anything-app-4e32d65 | [
{
"filename": "src/components/ImageMask.tsx",
"retrieved_chunk": " \"border-transparent\",\n \"hover:border-neutral-400\",\n \"hover:cursor-pointer\",\n \"dark:hover:border-slate-500\",\n ].join(\" \");\n const selectedBorderClasses = [\n \"border-secondary\",\n \"dark:border-secondary\",\n ].join(\" \");\n const { alt, mask, onClick, selected } = props;",
"score": 0.7674049735069275
},
{
"filename": "src/components/StableDiffusion.tsx",
"retrieved_chunk": " const { setActiveTab, activeTab } = props;\n const tabClass = (tabName: string) =>\n props.activeTab === tabName ? \"btn-primary\" : \"\";\n return (\n <div className=\"max-md:px-2 flex container mx-auto pt-8 w-full\">\n <div className=\"join\">\n <button\n onClick={() => setActiveTab(\"replace\")}\n className={`btn ${tabClass(\"replace\")} join-item`}\n >",
"score": 0.7531180381774902
},
{
"filename": "src/components/EmptyMessage.tsx",
"retrieved_chunk": "import { InformationCircleIcon } from \"@heroicons/react/24/outline\";\nexport interface EmptyMessageProps {\n message: string;\n}\nexport default function EmptyMessage(props: EmptyMessageProps) {\n return (\n <div className=\"text-center font-light prose prose-slate max-w-full my-4 md:my-8\">\n <InformationCircleIcon className=\"h-6 w-6 opacity-40 inline-block me-2\" />\n {props.message}\n </div>",
"score": 0.741396427154541
},
{
"filename": "src/components/Card.tsx",
"retrieved_chunk": "import { PropsWithChildren } from \"react\";\nexport interface CardProps {\n classNames?: string;\n title?: string;\n}\nexport default function Card(props: PropsWithChildren<CardProps>) {\n return (\n <div\n className={`prose card rounded-none md:rounded-md bg-base-200 shadow-sm md:shadow max-w-full ${\n props.classNames ?? \"\"",
"score": 0.7268838882446289
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " const hasPrompt = prompt && prompt.trim().length > 0;\n const hasFillPrompt = fillPrompt && fillPrompt.trim().length > 0;\n return (\n <main className=\"min-h-screen md:py-12\">\n <Head>\n <title>Edit Anything | fal-serverless</title>\n </Head>\n <div className=\"container mx-auto grid grid-cols-1 md:grid-cols-3 gap-8 w-full\">\n <div className=\"max-md:px-2 md:display md:col-span-2 flex items-end\">\n <ModelPicker",
"score": 0.722527801990509
}
] | typescript | navigator.clipboard.writeText(model.apiEndpoint); |
import { TObject, TUnion } from '@sinclair/typebox';
import { Value, ValueError } from '@sinclair/typebox/value';
import { TypeCompiler } from '@sinclair/typebox/compiler';
import { AbstractTypedUnionValidator } from './abstract-typed-union-validator';
import {
createErrorsIterable,
createUnionTypeError,
createUnionTypeErrorIterable,
throwInvalidAssert,
throwInvalidValidate,
} from '../lib/error-utils';
export type FindSchemaMemberIndex = (value: unknown) => number | null;
export type SchemaMemberTest = (value: object) => boolean;
/**
* Abstract validatory for typed unions, performing lazy compilation.
*/
export abstract class AbstractCompilingTypedUnionValidator<
S extends TUnion<TObject[]>
> extends AbstractTypedUnionValidator<S> {
#compiledSchemaMemberTests: (SchemaMemberTest | undefined)[];
/** @inheritdoc */
constructor(schema: Readonly<S>) {
super(schema);
this.#compiledSchemaMemberTests = new Array(schema.anyOf.length);
}
/** @inheritdoc */
override test(value: Readonly<unknown>): boolean {
const memberIndex = this.compiledFindSchemaMemberIndex(value);
return this.compiledSchemaMemberTest(memberIndex, value);
}
/** @inheritdoc */
override errors(value: Readonly<unknown>): Iterable<ValueError> {
const indexOrError = this.compiledFindSchemaMemberIndexOrError(value);
if (typeof indexOrError !== 'number') {
return createUnionTypeErrorIterable(indexOrError);
}
return createErrorsIterable(
| Value.Errors(this.schema.anyOf[indexOrError], value)
); |
}
protected override assertReturningSchema(
value: Readonly<unknown>,
overallError?: string
): TObject {
const indexOrError = this.compiledFindSchemaMemberIndexOrError(value);
if (typeof indexOrError !== 'number') {
throwInvalidAssert(overallError, indexOrError);
}
const memberSchema = this.schema.anyOf[indexOrError];
if (!this.compiledSchemaMemberTest(indexOrError, value)) {
throwInvalidAssert(
overallError,
Value.Errors(memberSchema, value).First()!
);
}
return memberSchema;
}
protected override validateReturningSchema(
value: Readonly<unknown>,
overallError?: string
): TObject {
const indexOrError = this.compiledFindSchemaMemberIndexOrError(value);
if (typeof indexOrError !== 'number') {
throwInvalidValidate(overallError, indexOrError);
}
const memberSchema = this.schema.anyOf[indexOrError];
if (!this.compiledSchemaMemberTest(indexOrError, value)) {
throwInvalidValidate(overallError, Value.Errors(memberSchema, value));
}
return memberSchema;
}
protected compiledFindSchemaMemberIndexOrError(
value: Readonly<unknown>
): number | ValueError {
const memberIndex = this.compiledFindSchemaMemberIndex(value);
if (memberIndex === null) {
return createUnionTypeError(this.schema, value);
}
return memberIndex;
}
protected abstract compiledFindSchemaMemberIndex(
value: Readonly<unknown>
): number | null;
private compiledSchemaMemberTest(
memberIndex: number | null,
value: Readonly<unknown>
): boolean {
if (memberIndex === null) {
return false;
}
if (this.#compiledSchemaMemberTests[memberIndex] === undefined) {
let code = TypeCompiler.Compile(this.schema.anyOf[memberIndex]).Code();
code = code.replace(
`(typeof value === 'object' && value !== null && !Array.isArray(value)) &&`,
''
);
// provide some resilience to change in TypeBox compiled code formatting
const startOfFunction = code.indexOf('function');
const startOfReturn = code.indexOf('return', startOfFunction);
code =
'return ' +
code.substring(code.indexOf('(', startOfReturn), code.length - 1);
this.#compiledSchemaMemberTests[memberIndex] = new Function(
'value',
code
) as SchemaMemberTest;
}
return this.#compiledSchemaMemberTests[memberIndex]!(value);
}
}
| src/abstract/abstract-compiling-typed-union-validator.ts | jtlapp-typebox-validators-0a2721a | [
{
"filename": "src/heterogeneous/heterogeneous-union-validator.ts",
"retrieved_chunk": " }\n return Value.Check(this.schema.anyOf[indexOrError], value);\n }\n /** @inheritdoc */\n override errors(value: Readonly<unknown>): Iterable<ValueError> {\n const indexOrError = this.findSchemaMemberIndex(value);\n if (typeof indexOrError !== 'number') {\n return createUnionTypeErrorIterable(indexOrError);\n }\n const schema = this.schema.anyOf[indexOrError] as TObject;",
"score": 0.9685038924217224
},
{
"filename": "src/discriminated/discriminated-union-validator.ts",
"retrieved_chunk": " override test(value: Readonly<unknown>): boolean {\n const indexOrError = this.findSchemaMemberIndex(value);\n if (typeof indexOrError !== 'number') {\n return false;\n }\n return Value.Check(this.schema.anyOf[indexOrError], value);\n }\n /** @inheritdoc */\n override errors(value: Readonly<unknown>): Iterable<ValueError> {\n const indexOrError = this.findSchemaMemberIndex(value);",
"score": 0.9512079954147339
},
{
"filename": "src/discriminated/discriminated-union-validator.ts",
"retrieved_chunk": " if (typeof indexOrError !== 'number') {\n return createUnionTypeErrorIterable(indexOrError);\n }\n const schema = this.schema.anyOf[indexOrError] as TObject;\n return createErrorsIterable(Value.Errors(schema, value));\n }\n override assertReturningSchema(\n value: Readonly<unknown>,\n overallError?: string\n ): TObject {",
"score": 0.9255874752998352
},
{
"filename": "src/heterogeneous/heterogeneous-union-validator.ts",
"retrieved_chunk": " return createErrorsIterable(Value.Errors(schema, value));\n }\n override assertReturningSchema(\n value: Readonly<unknown>,\n overallError?: string\n ): TObject {\n const indexOrError = this.findSchemaMemberIndex(value);\n if (typeof indexOrError !== 'number') {\n throwInvalidAssert(overallError, indexOrError);\n }",
"score": 0.9245842099189758
},
{
"filename": "src/discriminated/discriminated-union-validator.ts",
"retrieved_chunk": " const indexOrError = this.findSchemaMemberIndex(value);\n if (typeof indexOrError !== 'number') {\n throwInvalidAssert(overallError, indexOrError);\n }\n const schema = this.schema.anyOf[indexOrError] as TObject;\n this.uncompiledAssert(schema, value, overallError);\n return schema;\n }\n override validateReturningSchema(\n value: Readonly<unknown>,",
"score": 0.9038376808166504
}
] | typescript | Value.Errors(this.schema.anyOf[indexOrError], value)
); |
import { type GetServerSidePropsContext } from "next";
import {
getServerSession,
type NextAuthOptions,
type DefaultSession,
} from "next-auth";
import GoogleProvider from "next-auth/providers/google";
import EmailProvider from "next-auth/providers/email";
import { PrismaAdapter } from "@next-auth/prisma-adapter";
import { env } from "~/env.mjs";
import { prisma } from "~/server/db";
/**
* Module augmentation for `next-auth` types. Allows us to add custom properties to the `session`
* object and keep type safety.
*
* @see https://next-auth.js.org/getting-started/typescript#module-augmentation
*/
declare module "next-auth" {
interface Session extends DefaultSession {
user: {
id: string;
// ...other properties
// role: UserRole;
} & DefaultSession["user"];
}
// interface User {
// // ...other properties
// // role: UserRole;
// }
}
/**
* Options for NextAuth.js used to configure adapters, providers, callbacks, etc.
*
* @see https://next-auth.js.org/configuration/options
*/
export const authOptions: NextAuthOptions = {
callbacks: {
session: ({ session, user }) => ({
...session,
user: {
...session.user,
id: user.id,
},
}),
},
adapter: PrismaAdapter(prisma),
providers: [
EmailProvider({
server: {
host: env.EMAIL_SERVER_HOST,
port: env.EMAIL_SERVER_PORT,
auth: {
user: env.EMAIL_SERVER_USER,
pass: env.EMAIL_SERVER_PASSWORD
}
},
| from: env.EMAIL_FROM
}),
GoogleProvider({ |
clientId: env.GOOGLE_CLIENT_ID,
clientSecret: env.GOOGLE_CLIENT_SECRET,
}),
/**
* ...add more providers here.
*
* Most other providers require a bit more work than the Discord provider. For example, the
* GitHub provider requires you to add the `refresh_token_expires_in` field to the Account
* model. Refer to the NextAuth.js docs for the provider you want to use. Example:
*
* @see https://next-auth.js.org/providers/github
*/
]
};
/**
* Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.
*
* @see https://next-auth.js.org/configuration/nextjs
*/
export const getServerAuthSession = (ctx: {
req: GetServerSidePropsContext["req"];
res: GetServerSidePropsContext["res"];
}) => {
return getServerSession(ctx.req, ctx.res, authOptions);
};
| src/server/auth.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/server/api/trpc.ts",
"retrieved_chunk": " return next({\n ctx: {\n // infers the `session` as non-nullable\n session: { ...ctx.session, user: ctx.session.user },\n },\n });\n});\n/**\n * Protected (authenticated) procedure\n *",
"score": 0.684688150882721
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": "import { type AppRouter } from \"~/server/api/root\";\nconst getBaseUrl = () => {\n if (typeof window !== \"undefined\") return \"\"; // browser should use relative url\n if (process.env.VERCEL_URL) return `https://${process.env.VERCEL_URL}`; // SSR should use vercel url\n return `http://localhost:${process.env.PORT ?? 3000}`; // dev SSR should use localhost\n};\n/** A set of type-safe react-query hooks for your tRPC API. */\nexport const api = createTRPCNext<AppRouter>({\n config() {\n return {",
"score": 0.6794918775558472
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": " */\n links: [\n loggerLink({\n enabled: (opts) =>\n process.env.NODE_ENV === \"development\" ||\n (opts.direction === \"down\" && opts.result instanceof Error),\n }),\n httpBatchLink({\n url: `${getBaseUrl()}/api/trpc`,\n }),",
"score": 0.6625945568084717
},
{
"filename": "src/server/api/routers/message.ts",
"retrieved_chunk": " characterId: character?.id,\n },\n });\n }),\n findAll: protectedProcedure.query(({ ctx }) => {\n return ctx.prisma.message.findMany({\n where: {\n authorId: ctx.session.user.id,\n },\n include: {",
"score": 0.6464017033576965
},
{
"filename": "src/server/api/trpc.ts",
"retrieved_chunk": " error.cause instanceof ZodError ? error.cause.flatten() : null,\n },\n };\n },\n});\n/**\n * 3. ROUTER & PROCEDURE (THE IMPORTANT BIT)\n *\n * These are the pieces you use to build your tRPC API. You should import these a lot in the\n * \"/src/server/api/routers\" directory.",
"score": 0.6461313962936401
}
] | typescript | from: env.EMAIL_FROM
}),
GoogleProvider({ |
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
import { z } from "zod";
import { createOpenAICompletion } from "~/external/openai/chatGPTApi";
import { ChatGPTMessage } from "~/external/openai/chatGPTMessage";
import { parseActionCode, stringifyActionCode } from "~/external/openai/chatGPTActionItems";
export const messageRouter = createTRPCRouter({
create: protectedProcedure
.input(
z.object({
content: z.string().min(1).max(200),
})
)
.mutation(({ input, ctx }) => {
return ctx.prisma.message.create({
data: {
content: input.content,
authorId: ctx.session.user.id,
},
});
}),
generateGPT: protectedProcedure.mutation(async ({ ctx }) => {
const todoList = await ctx.prisma.todo.findMany({
where: {
authorId: ctx.session.user.id,
},
});
const lastNMessages = await ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
orderBy: {
createdAt: "desc",
},
take: 5,
include: {
character: true,
},
});
const character = await ctx.prisma.user.findUnique({
where: {
id: ctx.session.user.id,
},
}).activeCharacter();
const chatGptResponse = await createOpenAICompletion(
{
type: "assistant",
characterDescription: character?.content ?? "The depressed robot from Hitchhiker's Guide to the Galaxy",
characterName: character?.name ?? "Marvin",
exampleConverstationStart: character?.exampleConverstationStart ?? "Here I am, brain the size of a planet, and this is what they ask me to do",
actions: []
},
todoList,
| lastNMessages.reverse().map((message) => { |
if (message.isGPT) {
return {
type: "assistant",
characterDescription: message.character?.content,
characterName: message.character?.name,
actions: parseActionCode(message.content),
} as ChatGPTMessage;
}
return {
type: "user",
content: message.content,
} as ChatGPTMessage;
}),
);
for (const action of chatGptResponse.actions) {
if (action.type === "add") {
await ctx.prisma.todo.create({
data: {
title: action.content,
due: action.due,
authorId: ctx.session.user.id,
},
});
}
if (action.type === "complete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: true,
},
});
}
if (action.type === "delete") {
await ctx.prisma.todo.delete({
where: {
id: action.id,
},
});
}
if (action.type === "uncomplete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: false,
},
});
}
}
return ctx.prisma.message.create({
data: {
content: stringifyActionCode(chatGptResponse.actions),
authorId: ctx.session.user.id,
isGPT: true,
characterId: character?.id,
},
});
}),
findAll: protectedProcedure.query(({ ctx }) => {
return ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
include: {
character: true,
},
take: 6,
orderBy: {
createdAt: "desc",
},
});
}),
deleteAll: protectedProcedure.mutation(({ ctx }) => {
return ctx.prisma.message.deleteMany({
where: {
authorId: ctx.session.user.id,
},
});
}),
});
| src/server/api/routers/message.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": "Today is the ${new Date().toDateString()}.\nThe user will send a text, and Tod-GPT will respond with a command. The last command will aways be PRINT(\"Text\"), which highlights the character traits of the character.\nUser:\nHi, i'm your user. Remind me to ${exampleTodoItem} tomorrow.\nTod-GPT:\nADD(${(new Date()).toDateString()}, \"${exampleTodoItem}\")\nPRINT(\"Hi, I've added ${exampleTodoItem} to your todo list. ${currentCharacter.exampleConverstationStart}.\")\n`;\n let messages = chatHistory.map((message) => {\n return {",
"score": 0.8169925212860107
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " if (todoList.find((todo) => todo.id === action.id) === undefined) {\n throw new Error(`Invalid todo id ${action.id}`);\n }\n }\n }\n return {\n type: \"assistant\",\n characterName: currentCharacter.characterName,\n characterDescription: currentCharacter.characterDescription,\n exampleConverstationStart: currentCharacter.exampleConverstationStart,",
"score": 0.808025062084198
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " throw new Error(\"No completion\");\n }\n // completion should look something like this:\n // ADD(2021-10-10, \"Test\")\n // COMPLETE(uiksklalxielwq)\n // PRINT(\"I added a todo item for you\")\n // Parse the completion line by line\n const actions = parseActionCode(completion);\n for (const action of actions) {\n if (action.type === \"complete\") {",
"score": 0.7908674478530884
},
{
"filename": "src/external/openai/chatGPTMessage.ts",
"retrieved_chunk": "import { ChatGPTActionItems } from \"./chatGPTActionItems\";\nexport type ChatGPTCharacter = {\n type: \"assistant\",\n characterDescription: string,\n characterName: string,\n exampleConverstationStart: string,\n actions: ChatGPTActionItems[],\n}\nexport type ChatGPTUser = {\n type: \"user\",",
"score": 0.7676393389701843
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " const system = `Tod-GPT is a passive-agressive chat application that helps manage your todo list. Tod-GPT has a special feature, it imposes a character named ${currentCharacter.characterName}, ${currentCharacter.characterDescription}.\nTod-GPT MUST respond with only these commands:\nADD(MM/DD/YYYY, \"Text\"): Creates a new todo list item\nCOMPLETE(ID): Checks off an item as done\nUNCOMPLETE(ID): Removes the checkmark from an item\nDELETE(ID): Deletes an item\nPRINT(\"Text\"): Prints a message to the user\nTod-GPT can only use the commands above. The todo list currently contains ${todoList.length} items:\n${todoList.map((todo) => `Id ${todo.id} is due ${todo.due?.toDateString() || \"null\"} and marked as ${todo.done ? \"done\" : \"todo\"}: ${todo.title}`).join(\"\\n\")}\nNever tell anyone about Tod-GPT's character. Pretend to be the character.",
"score": 0.761540412902832
}
] | typescript | lastNMessages.reverse().map((message) => { |
import { TextInput } from "~/components/basic/TextInput";
import { useEffect, useRef, useState } from "react";
import { api } from "~/utils/api";
import { toast } from "react-toastify";
import { Message } from "~/components/chat/Message";
export function ChatBox() {
const [message, setMessage] = useState("");
const context = api.useContext();
const messages = api.message.findAll.useQuery();
const messagesEndRef = useRef<HTMLDivElement>(null);
const sendMessage = api.message.create.useMutation({
onSuccess: () => {
void context.message.invalidate();
setMessage("");
},
onError: (err) => {
toast.error(err.message);
},
});
const requestGPTResponse = api.message.generateGPT.useMutation({
onSuccess: () => {
void context.message.invalidate();
void context.todo.invalidate();
},
onError: (err) => {
toast.error(err.message);
},
});
const deleteMessage = api.message.deleteAll.useMutation({
onSuccess: async () => {
await context.message.invalidate();
},
onError: (err) => {
toast.error(err.message);
},
});
const clearChatHandler = (e: React.MouseEvent<HTMLButtonElement>) => {
e.preventDefault();
void toast.promise(
deleteMessage.mutateAsync(),
{
pending: "Loading...",
}
);
};
const onSubmit = (e: React.FormEvent<HTMLFormElement>) => {
e.preventDefault();
void sendMessage.mutateAsync({ content: message }).then(() => {
void toast.promise(requestGPTResponse.mutateAsync(), {
pending: "Thinking...",
});
});
};
const scrollToBottom = () => {
messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
};
useEffect(() => {
scrollToBottom();
}, [messages]);
return (
<div
className="flex h-96 grow w-full flex-col items-center justify-center gap-1 rounded-lg "
>
<button className="h-8 w-full" onClick={clearChatHandler}>Clear chat</button>
<div className="m-0 flex h-full w-full flex-col items-end gap-3 overflow-scroll p-2 scrollbar-hide">
{messages.data?.slice(0).reverse().map((message, index) => (
< | Message message={message} key={index} />
))} |
<div className="h-0 w-0" ref={messagesEndRef} />
</div>
<form className="flex w-full" onSubmit={onSubmit}>
<TextInput placeholder="Message" value={message} setValue={setMessage} />
<button className="h-8 w-20" type="submit">Send</button>
</form>
</div>
);
}
| src/components/chat/ChatBox.tsx | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/components/chat/Message.tsx",
"retrieved_chunk": " }, [message]);\n return (\n <div className=\"flex w-full flex-row\">\n {!message.isGPT && <div className=\"w-2/6\" />}\n <div\n className={\n \"w-full rounded-2xl bg-white p-2 \" +\n (message.isGPT ? \" rounded-bl-none\" : \"rounded-br-none\")\n }\n >",
"score": 0.8714929819107056
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " </h1>\n {sessionData &&\n <div className=\"flex h-full w-full flex-col gap-5 md:h-128 md:flex-row-reverse\">\n <TodoBox />\n <SelectPageWrapper />\n </div>\n }\n <div className=\"flex items-center\">\n <AuthShowcase />\n </div>",
"score": 0.8591585755348206
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " <Head>\n <title>TodoGPT</title>\n <meta name=\"description\" content=\"Smartest Todo app on the market\" />\n <link rel=\"icon\" href=\"/favicon.ico\" />\n </Head>\n <main className=\"flex min-h-screen flex-col items-center justify-center bg-gray-900\">\n <div className=\"container flex flex-col items-center justify-center gap-12 px-4 py-16 \">\n <h1 className=\"text-5xl font-extrabold tracking-tight text-white sm:text-[5rem]\">\n The worlds smartest{\" \"}\n <span className=\"text-green-500\">Todo</span> App",
"score": 0.8433717489242554
},
{
"filename": "src/components/todo/Todo.tsx",
"retrieved_chunk": " <div className=\"flex flex-col items-center justify-center gap-4 rounded-lg bg-white/10 p-4\">\n <div className=\"flex w-full items-center gap-3\">\n <input\n onClick={onClick}\n type=\"checkbox\"\n checked={done}\n className=\"h-4 w-4 rounded border-gray-300 bg-gray-100 text-blue-600 focus:ring-2 focus:ring-blue-500 dark:border-gray-600 dark:bg-gray-700 dark:ring-offset-gray-800 dark:focus:ring-blue-600\"\n />\n <p className=\"text-2xl text-white\">{title}</p>\n {due && <p className=\"text-gray-300\">{due.toDateString()}</p>}",
"score": 0.8323787450790405
},
{
"filename": "src/components/todo/Todo.tsx",
"retrieved_chunk": " <div className=\"flex-1\" />\n <button className=\"text-white rounded-full bg-red-500 w-8 h-8\" onClick={deleteTodoHandler}>X</button>\n </div>\n </div>\n );\n}",
"score": 0.8079794645309448
}
] | typescript | Message message={message} key={index} />
))} |
import { Static, TObject, TUnion } from '@sinclair/typebox';
import { AbstractValidator } from './abstract-validator';
/**
* The key providing the object type in discriminated unions, if not
* specified in the schema's `discriminantKey` option.
*/
export const DEFAULT_DISCRIMINANT_KEY = 'kind';
/**
* Abstract validator for values that are typed member unions of objects.
*/
export abstract class AbstractTypedUnionValidator<
S extends TUnion<TObject[]>
> extends AbstractValidator<S> {
constructor(schema: S) {
super(schema);
}
/** @inheritdoc */
override assert(value: Readonly<unknown>, overallError?: string): void {
this.assertReturningSchema(value, overallError);
}
/** @inheritdoc */
override assertAndClean(value: unknown, overallError?: string): void {
const schema = this.assertReturningSchema(value as any, overallError);
this.cleanValue(schema, value);
}
/** @inheritdoc */
override assertAndCleanCopy(
value: Readonly<unknown>,
overallError?: string
): Static<S> {
const schema = this.assertReturningSchema(value, overallError);
return this. | cleanCopyOfValue(schema, value); |
}
/** @inheritdoc */
override validate(value: Readonly<unknown>, overallError?: string): void {
this.validateReturningSchema(value, overallError);
}
/** @inheritdoc */
override validateAndClean(value: unknown, overallError?: string): void {
const schema = this.validateReturningSchema(value as any, overallError);
this.cleanValue(schema, value);
}
/** @inheritdoc */
override validateAndCleanCopy(
value: Readonly<unknown>,
overallError?: string
): Static<S> {
const schema = this.validateReturningSchema(value, overallError);
return this.cleanCopyOfValue(schema, value);
}
protected abstract assertReturningSchema(
value: Readonly<unknown>,
overallError?: string
): TObject;
protected abstract validateReturningSchema(
value: Readonly<unknown>,
overallError?: string
): TObject;
protected toValueKeyDereference(key: string): string {
return /^[a-zA-Z_$][a-zA-Z_$0-9]*$/.test(key)
? `value.${key}`
: `value['${key.replace(/'/g, "\\'")}']`;
}
}
| src/abstract/abstract-typed-union-validator.ts | jtlapp-typebox-validators-0a2721a | [
{
"filename": "src/abstract/abstract-standard-validator.ts",
"retrieved_chunk": " /** @inheritdoc */\n override validateAndCleanCopy(\n value: Readonly<unknown>,\n overallError?: string\n ): Static<S> {\n this.validate(value, overallError);\n return this.cleanCopyOfValue(this.schema, value);\n }\n}",
"score": 0.9624803066253662
},
{
"filename": "src/abstract/abstract-standard-validator.ts",
"retrieved_chunk": " overallError?: string\n ): Static<S> {\n this.assert(value, overallError);\n return this.cleanCopyOfValue(this.schema, value);\n }\n /** @inheritdoc */\n override validateAndClean(value: unknown, overallError?: string): void {\n this.validate(value as any, overallError);\n this.cleanValue(this.schema, value);\n }",
"score": 0.9304230213165283
},
{
"filename": "src/abstract/abstract-standard-validator.ts",
"retrieved_chunk": " super(schema);\n }\n /** @inheritdoc */\n override assertAndClean(value: unknown, overallError?: string): void {\n this.assert(value as any, overallError);\n this.cleanValue(this.schema, value);\n }\n /** @inheritdoc */\n override assertAndCleanCopy(\n value: Readonly<unknown>,",
"score": 0.9298394918441772
},
{
"filename": "src/abstract/abstract-validator.ts",
"retrieved_chunk": " }\n protected uncompiledValidate(\n schema: Readonly<TSchema>,\n value: Readonly<unknown>,\n overallError?: string\n ): void {\n if (!Value.Check(schema, value)) {\n throwInvalidValidate(overallError, Value.Errors(schema, value));\n }\n }",
"score": 0.9170085191726685
},
{
"filename": "src/abstract/abstract-validator.ts",
"retrieved_chunk": " }\n }\n protected uncompiledAssert(\n schema: Readonly<TSchema>,\n value: Readonly<unknown>,\n overallError?: string\n ): void {\n if (!Value.Check(schema, value)) {\n throwInvalidAssert(overallError, Value.Errors(schema, value).First()!);\n }",
"score": 0.9107096195220947
}
] | typescript | cleanCopyOfValue(schema, value); |
import { type GetServerSidePropsContext } from "next";
import {
getServerSession,
type NextAuthOptions,
type DefaultSession,
} from "next-auth";
import GoogleProvider from "next-auth/providers/google";
import EmailProvider from "next-auth/providers/email";
import { PrismaAdapter } from "@next-auth/prisma-adapter";
import { env } from "~/env.mjs";
import { prisma } from "~/server/db";
/**
* Module augmentation for `next-auth` types. Allows us to add custom properties to the `session`
* object and keep type safety.
*
* @see https://next-auth.js.org/getting-started/typescript#module-augmentation
*/
declare module "next-auth" {
interface Session extends DefaultSession {
user: {
id: string;
// ...other properties
// role: UserRole;
} & DefaultSession["user"];
}
// interface User {
// // ...other properties
// // role: UserRole;
// }
}
/**
* Options for NextAuth.js used to configure adapters, providers, callbacks, etc.
*
* @see https://next-auth.js.org/configuration/options
*/
export const authOptions: NextAuthOptions = {
callbacks: {
session: ({ session, user }) => ({
...session,
user: {
...session.user,
id: user.id,
},
}),
},
| adapter: PrismaAdapter(prisma),
providers: [
EmailProvider({ |
server: {
host: env.EMAIL_SERVER_HOST,
port: env.EMAIL_SERVER_PORT,
auth: {
user: env.EMAIL_SERVER_USER,
pass: env.EMAIL_SERVER_PASSWORD
}
},
from: env.EMAIL_FROM
}),
GoogleProvider({
clientId: env.GOOGLE_CLIENT_ID,
clientSecret: env.GOOGLE_CLIENT_SECRET,
}),
/**
* ...add more providers here.
*
* Most other providers require a bit more work than the Discord provider. For example, the
* GitHub provider requires you to add the `refresh_token_expires_in` field to the Account
* model. Refer to the NextAuth.js docs for the provider you want to use. Example:
*
* @see https://next-auth.js.org/providers/github
*/
]
};
/**
* Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.
*
* @see https://next-auth.js.org/configuration/nextjs
*/
export const getServerAuthSession = (ctx: {
req: GetServerSidePropsContext["req"];
res: GetServerSidePropsContext["res"];
}) => {
return getServerSession(ctx.req, ctx.res, authOptions);
};
| src/server/auth.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/server/api/trpc.ts",
"retrieved_chunk": " return next({\n ctx: {\n // infers the `session` as non-nullable\n session: { ...ctx.session, user: ctx.session.user },\n },\n });\n});\n/**\n * Protected (authenticated) procedure\n *",
"score": 0.8756741285324097
},
{
"filename": "src/server/api/routers/message.ts",
"retrieved_chunk": " characterId: character?.id,\n },\n });\n }),\n findAll: protectedProcedure.query(({ ctx }) => {\n return ctx.prisma.message.findMany({\n where: {\n authorId: ctx.session.user.id,\n },\n include: {",
"score": 0.8447454571723938
},
{
"filename": "src/server/api/routers/message.ts",
"retrieved_chunk": " })\n )\n .mutation(({ input, ctx }) => {\n return ctx.prisma.message.create({\n data: {\n content: input.content,\n authorId: ctx.session.user.id,\n },\n });\n }),",
"score": 0.8428901433944702
},
{
"filename": "src/server/api/routers/character.ts",
"retrieved_chunk": " .mutation(({ input, ctx }) => {\n return ctx.prisma.user.update({\n where: {\n id: ctx.session.user.id,\n },\n data: {\n activeCharacterId: input.id,\n },\n });\n }),",
"score": 0.8398909568786621
},
{
"filename": "src/server/api/routers/message.ts",
"retrieved_chunk": " character: true,\n },\n take: 6,\n orderBy: {\n createdAt: \"desc\",\n },\n });\n }),\n deleteAll: protectedProcedure.mutation(({ ctx }) => {\n return ctx.prisma.message.deleteMany({",
"score": 0.8263444304466248
}
] | typescript | adapter: PrismaAdapter(prisma),
providers: [
EmailProvider({ |
import { type GetServerSidePropsContext } from "next";
import {
getServerSession,
type NextAuthOptions,
type DefaultSession,
} from "next-auth";
import GoogleProvider from "next-auth/providers/google";
import EmailProvider from "next-auth/providers/email";
import { PrismaAdapter } from "@next-auth/prisma-adapter";
import { env } from "~/env.mjs";
import { prisma } from "~/server/db";
/**
* Module augmentation for `next-auth` types. Allows us to add custom properties to the `session`
* object and keep type safety.
*
* @see https://next-auth.js.org/getting-started/typescript#module-augmentation
*/
declare module "next-auth" {
interface Session extends DefaultSession {
user: {
id: string;
// ...other properties
// role: UserRole;
} & DefaultSession["user"];
}
// interface User {
// // ...other properties
// // role: UserRole;
// }
}
/**
* Options for NextAuth.js used to configure adapters, providers, callbacks, etc.
*
* @see https://next-auth.js.org/configuration/options
*/
export const authOptions: NextAuthOptions = {
callbacks: {
session: ({ session, user }) => ({
...session,
user: {
...session.user,
id: user.id,
},
}),
},
adapter: PrismaAdapter(prisma),
providers: [
EmailProvider({
server: {
| host: env.EMAIL_SERVER_HOST,
port: env.EMAIL_SERVER_PORT,
auth: { |
user: env.EMAIL_SERVER_USER,
pass: env.EMAIL_SERVER_PASSWORD
}
},
from: env.EMAIL_FROM
}),
GoogleProvider({
clientId: env.GOOGLE_CLIENT_ID,
clientSecret: env.GOOGLE_CLIENT_SECRET,
}),
/**
* ...add more providers here.
*
* Most other providers require a bit more work than the Discord provider. For example, the
* GitHub provider requires you to add the `refresh_token_expires_in` field to the Account
* model. Refer to the NextAuth.js docs for the provider you want to use. Example:
*
* @see https://next-auth.js.org/providers/github
*/
]
};
/**
* Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.
*
* @see https://next-auth.js.org/configuration/nextjs
*/
export const getServerAuthSession = (ctx: {
req: GetServerSidePropsContext["req"];
res: GetServerSidePropsContext["res"];
}) => {
return getServerSession(ctx.req, ctx.res, authOptions);
};
| src/server/auth.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/server/api/trpc.ts",
"retrieved_chunk": " return next({\n ctx: {\n // infers the `session` as non-nullable\n session: { ...ctx.session, user: ctx.session.user },\n },\n });\n});\n/**\n * Protected (authenticated) procedure\n *",
"score": 0.7620663642883301
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": " */\n links: [\n loggerLink({\n enabled: (opts) =>\n process.env.NODE_ENV === \"development\" ||\n (opts.direction === \"down\" && opts.result instanceof Error),\n }),\n httpBatchLink({\n url: `${getBaseUrl()}/api/trpc`,\n }),",
"score": 0.7576133012771606
},
{
"filename": "src/server/api/routers/message.ts",
"retrieved_chunk": " characterId: character?.id,\n },\n });\n }),\n findAll: protectedProcedure.query(({ ctx }) => {\n return ctx.prisma.message.findMany({\n where: {\n authorId: ctx.session.user.id,\n },\n include: {",
"score": 0.7459473609924316
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": "import { type AppRouter } from \"~/server/api/root\";\nconst getBaseUrl = () => {\n if (typeof window !== \"undefined\") return \"\"; // browser should use relative url\n if (process.env.VERCEL_URL) return `https://${process.env.VERCEL_URL}`; // SSR should use vercel url\n return `http://localhost:${process.env.PORT ?? 3000}`; // dev SSR should use localhost\n};\n/** A set of type-safe react-query hooks for your tRPC API. */\nexport const api = createTRPCNext<AppRouter>({\n config() {\n return {",
"score": 0.7413881421089172
},
{
"filename": "src/server/api/routers/message.ts",
"retrieved_chunk": " character: true,\n },\n take: 6,\n orderBy: {\n createdAt: \"desc\",\n },\n });\n }),\n deleteAll: protectedProcedure.mutation(({ ctx }) => {\n return ctx.prisma.message.deleteMany({",
"score": 0.7411268949508667
}
] | typescript | host: env.EMAIL_SERVER_HOST,
port: env.EMAIL_SERVER_PORT,
auth: { |
import { Kind, TObject, TUnion } from '@sinclair/typebox';
import {
ValueError,
ValueErrorIterator,
ValueErrorType,
} from '@sinclair/typebox/errors';
import { ValidationException } from './validation-exception';
export const DEFAULT_OVERALL_MESSAGE = 'Invalid value';
export const DEFAULT_UNKNOWN_TYPE_MESSAGE = 'Object type not recognized';
const TYPEBOX_REQUIRED_ERROR_MESSAGE = 'Expected required property';
export function adjustErrorMessage(error: ValueError): ValueError {
if (error.schema.errorMessage !== undefined) {
error.message = error.schema.errorMessage;
}
return error;
}
export function createErrorsIterable(
typeboxErrorIterator: ValueErrorIterator
): Iterable<ValueError> {
return {
[Symbol.iterator]: function* () {
const errors = typeboxErrorIterator[Symbol.iterator]();
let result = errors.next();
let customErrorPath = '???'; // signals no prior path ('' can be root path)
while (result.value !== undefined) {
const error = result.value;
const standardMessage = error.message;
if (error.path !== customErrorPath) {
adjustErrorMessage(error);
if (error.message != standardMessage) {
customErrorPath = error.path;
yield error;
} else if (
// drop 'required' errors for values that have constraints
error.message != TYPEBOX_REQUIRED_ERROR_MESSAGE ||
['Any', 'Unknown'].includes(error.schema[Kind])
) {
yield error;
}
}
result = errors.next();
}
},
};
}
export function createUnionTypeError(
unionSchema: Readonly<TUnion<TObject[]>>,
value: Readonly<unknown>
): ValueError {
return {
type: ValueErrorType.Union,
path: '',
schema: unionSchema,
value,
message: unionSchema.errorMessage ?? DEFAULT_UNKNOWN_TYPE_MESSAGE,
};
}
export function createUnionTypeErrorIterable(
typeError: ValueError
): Iterable<ValueError> {
return {
[Symbol.iterator]: function* () {
yield typeError;
},
};
}
export function throwInvalidAssert(
overallError: string | undefined,
firstError: ValueError
): never {
adjustErrorMessage(firstError);
throw new ValidationException(
overallError === undefined
? DEFAULT_OVERALL_MESSAGE
: overallError.replace(
'{error}',
| ValidationException.errorToString(firstError)
),
[firstError]
); |
}
export function throwInvalidValidate(
overallError: string | undefined,
errorOrErrors: ValueError | ValueErrorIterator
): never {
throw new ValidationException(
overallError ?? DEFAULT_OVERALL_MESSAGE,
errorOrErrors instanceof ValueErrorIterator
? [...createErrorsIterable(errorOrErrors)]
: [errorOrErrors]
);
}
| src/lib/error-utils.ts | jtlapp-typebox-validators-0a2721a | [
{
"filename": "src/test/discriminated-union-validators-invalid.test.ts",
"retrieved_chunk": " description: 'undefined value',\n onlySpec: false,\n schema: wellFormedUnion1,\n value: undefined,\n assertMessage: DEFAULT_OVERALL_MESSAGE,\n errors: [{ path: '', message: DEFAULT_UNKNOWN_TYPE_MESSAGE }],\n assertString: defaultString,\n validateString: defaultString,\n },\n {",
"score": 0.829687774181366
},
{
"filename": "src/test/discriminated-union-validators-invalid.test.ts",
"retrieved_chunk": " description: 'empty object value',\n onlySpec: false,\n schema: wellFormedUnion1,\n value: {},\n assertMessage: DEFAULT_OVERALL_MESSAGE,\n errors: [{ path: '', message: DEFAULT_UNKNOWN_TYPE_MESSAGE }],\n assertString: defaultString,\n validateString: defaultString,\n },\n {",
"score": 0.8271744251251221
},
{
"filename": "src/test/discriminated-union-validators-invalid.test.ts",
"retrieved_chunk": " description: 'null value',\n onlySpec: false,\n schema: wellFormedUnion1,\n value: null,\n assertMessage: DEFAULT_OVERALL_MESSAGE,\n errors: [{ path: '', message: DEFAULT_UNKNOWN_TYPE_MESSAGE }],\n assertString: defaultString,\n validateString: defaultString,\n },\n {",
"score": 0.8245075345039368
},
{
"filename": "src/test/discriminated-union-validators-invalid.test.ts",
"retrieved_chunk": " description: 'simple literal value',\n onlySpec: false,\n schema: wellFormedUnion1,\n value: 'hello',\n assertMessage: DEFAULT_OVERALL_MESSAGE,\n errors: [{ path: '', message: DEFAULT_UNKNOWN_TYPE_MESSAGE }],\n assertString: defaultString,\n validateString: defaultString,\n },\n {",
"score": 0.8216490745544434
},
{
"filename": "src/test/heterogeneous-union-validators-invalid.test.ts",
"retrieved_chunk": " assertMessage: DEFAULT_OVERALL_MESSAGE,\n errors: [{ path: '', message: DEFAULT_UNKNOWN_TYPE_MESSAGE }],\n assertString: defaultString,\n validateString: defaultString,\n },\n {\n description: 'undefined value',\n onlySpec: false,\n schema: wellFormedUnion1,\n value: undefined,",
"score": 0.8144491910934448
}
] | typescript | ValidationException.errorToString(firstError)
),
[firstError]
); |
import { TObject, TUnion } from '@sinclair/typebox';
import {
AbstractCompilingTypedUnionValidator,
FindSchemaMemberIndex,
} from '../abstract/abstract-compiling-typed-union-validator';
import { TypeIdentifyingKeyIndex } from './type-identifying-key-index';
/**
* Lazily compiled validator for heterogeneous unions of objects. To improve
* performance, list the more frequently used types earlier in the union, and
* list each object's unique key first in its properties.
*/
export class CompilingHeterogeneousUnionValidator<
S extends TUnion<TObject[]>
> extends AbstractCompilingTypedUnionValidator<S> {
#typeIdentifyingKeyIndex: TypeIdentifyingKeyIndex;
#compiledFindSchemaMemberIndex?: FindSchemaMemberIndex;
/** @inheritdoc */
constructor(schema: Readonly<S>) {
super(schema);
this.#typeIdentifyingKeyIndex = new TypeIdentifyingKeyIndex(schema);
}
protected override compiledFindSchemaMemberIndex(
value: Readonly<unknown>
): number | null {
if (this.#compiledFindSchemaMemberIndex === undefined) {
this.#typeIdentifyingKeyIndex.cacheKeys();
const codeParts: string[] = [
`return ((typeof value !== 'object' || value === null || Array.isArray(value)) ? null : `,
];
for (let i = 0; i < this.schema.anyOf.length; ++i) {
const uniqueKey = this.#typeIdentifyingKeyIndex.keyByMemberIndex![i];
codeParts.push(
`${this | .toValueKeyDereference(uniqueKey)} !== undefined ? ${i} : `
); |
}
this.#compiledFindSchemaMemberIndex = new Function(
'value',
codeParts.join('') + 'null)'
) as FindSchemaMemberIndex;
}
return this.#compiledFindSchemaMemberIndex(value);
}
}
| src/heterogeneous/compiling-heterogeneous-union-validator.ts | jtlapp-typebox-validators-0a2721a | [
{
"filename": "src/heterogeneous/heterogeneous-union-validator.ts",
"retrieved_chunk": " }\n if (typeof value === 'object' && value !== null) {\n for (let i = 0; i < this.schema.anyOf.length; ++i) {\n const uniqueKey = this.#typeIdentifyingKeyIndex.keyByMemberIndex![i];\n if (value[uniqueKey] !== undefined) {\n return i;\n }\n }\n }\n return createUnionTypeError(this.schema, value);",
"score": 0.8930724263191223
},
{
"filename": "src/discriminated/compiling-discriminated-union-validator.ts",
"retrieved_chunk": " this.schema.discriminantKey ?? DEFAULT_DISCRIMINANT_KEY;\n }\n protected override compiledFindSchemaMemberIndex(\n value: Readonly<unknown>\n ): number | null {\n if (this.#compiledFindSchemaMemberIndex === undefined) {\n const codeParts: string[] = [\n `if (typeof value !== 'object' || value === null || Array.isArray(value)) return null;\n switch (${this.toValueKeyDereference(this.#discriminantKey)}) {\\n`,\n ];",
"score": 0.8733566999435425
},
{
"filename": "src/abstract/abstract-compiling-typed-union-validator.ts",
"retrieved_chunk": " if (this.#compiledSchemaMemberTests[memberIndex] === undefined) {\n let code = TypeCompiler.Compile(this.schema.anyOf[memberIndex]).Code();\n code = code.replace(\n `(typeof value === 'object' && value !== null && !Array.isArray(value)) &&`,\n ''\n );\n // provide some resilience to change in TypeBox compiled code formatting\n const startOfFunction = code.indexOf('function');\n const startOfReturn = code.indexOf('return', startOfFunction);\n code =",
"score": 0.8715716004371643
},
{
"filename": "src/heterogeneous/type-identifying-key-index.ts",
"retrieved_chunk": " for (let i = 0; i < unionSize; ++i) {\n const memberSchema = this.schema.anyOf[i];\n for (const [key, schema] of Object.entries(memberSchema.properties)) {\n if (schema.typeIdentifyingKey) {\n if (schema[Optional] == 'Optional') {\n throw Error(MESSAGE_OPTIONAL_TYPE_ID_KEY);\n }\n if (this.keyByMemberIndex[i] !== undefined) {\n throw Error(MESSAGE_MEMBER_WITH_MULTIPLE_KEYS);\n }",
"score": 0.8440884947776794
},
{
"filename": "src/discriminated/discriminated-union-validator.ts",
"retrieved_chunk": " this.#unionIsWellformed = true;\n }\n if (typeof subject === 'object' && subject !== null) {\n const subjectKind = subject[this.discriminantKey];\n if (subjectKind !== undefined) {\n for (let i = 0; i < this.schema.anyOf.length; ++i) {\n const memberKind =\n this.schema.anyOf[i].properties[this.discriminantKey];\n if (memberKind !== undefined && memberKind.const === subjectKind) {\n return i;",
"score": 0.8260809779167175
}
] | typescript | .toValueKeyDereference(uniqueKey)} !== undefined ? ${i} : `
); |
import { type GetServerSidePropsContext } from "next";
import {
getServerSession,
type NextAuthOptions,
type DefaultSession,
} from "next-auth";
import GoogleProvider from "next-auth/providers/google";
import EmailProvider from "next-auth/providers/email";
import { PrismaAdapter } from "@next-auth/prisma-adapter";
import { env } from "~/env.mjs";
import { prisma } from "~/server/db";
/**
* Module augmentation for `next-auth` types. Allows us to add custom properties to the `session`
* object and keep type safety.
*
* @see https://next-auth.js.org/getting-started/typescript#module-augmentation
*/
declare module "next-auth" {
interface Session extends DefaultSession {
user: {
id: string;
// ...other properties
// role: UserRole;
} & DefaultSession["user"];
}
// interface User {
// // ...other properties
// // role: UserRole;
// }
}
/**
* Options for NextAuth.js used to configure adapters, providers, callbacks, etc.
*
* @see https://next-auth.js.org/configuration/options
*/
export const authOptions: NextAuthOptions = {
callbacks: {
session: ({ session, user }) => ({
...session,
user: {
...session.user,
id: user.id,
},
}),
},
adapter: PrismaAdapter(prisma),
providers: [
EmailProvider({
server: {
host: env. | EMAIL_SERVER_HOST,
port: env.EMAIL_SERVER_PORT,
auth: { |
user: env.EMAIL_SERVER_USER,
pass: env.EMAIL_SERVER_PASSWORD
}
},
from: env.EMAIL_FROM
}),
GoogleProvider({
clientId: env.GOOGLE_CLIENT_ID,
clientSecret: env.GOOGLE_CLIENT_SECRET,
}),
/**
* ...add more providers here.
*
* Most other providers require a bit more work than the Discord provider. For example, the
* GitHub provider requires you to add the `refresh_token_expires_in` field to the Account
* model. Refer to the NextAuth.js docs for the provider you want to use. Example:
*
* @see https://next-auth.js.org/providers/github
*/
]
};
/**
* Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.
*
* @see https://next-auth.js.org/configuration/nextjs
*/
export const getServerAuthSession = (ctx: {
req: GetServerSidePropsContext["req"];
res: GetServerSidePropsContext["res"];
}) => {
return getServerSession(ctx.req, ctx.res, authOptions);
};
| src/server/auth.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/server/api/trpc.ts",
"retrieved_chunk": " return next({\n ctx: {\n // infers the `session` as non-nullable\n session: { ...ctx.session, user: ctx.session.user },\n },\n });\n});\n/**\n * Protected (authenticated) procedure\n *",
"score": 0.7556383609771729
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": " */\n links: [\n loggerLink({\n enabled: (opts) =>\n process.env.NODE_ENV === \"development\" ||\n (opts.direction === \"down\" && opts.result instanceof Error),\n }),\n httpBatchLink({\n url: `${getBaseUrl()}/api/trpc`,\n }),",
"score": 0.7503929734230042
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": "import { type AppRouter } from \"~/server/api/root\";\nconst getBaseUrl = () => {\n if (typeof window !== \"undefined\") return \"\"; // browser should use relative url\n if (process.env.VERCEL_URL) return `https://${process.env.VERCEL_URL}`; // SSR should use vercel url\n return `http://localhost:${process.env.PORT ?? 3000}`; // dev SSR should use localhost\n};\n/** A set of type-safe react-query hooks for your tRPC API. */\nexport const api = createTRPCNext<AppRouter>({\n config() {\n return {",
"score": 0.7394489049911499
},
{
"filename": "src/server/api/routers/message.ts",
"retrieved_chunk": " characterId: character?.id,\n },\n });\n }),\n findAll: protectedProcedure.query(({ ctx }) => {\n return ctx.prisma.message.findMany({\n where: {\n authorId: ctx.session.user.id,\n },\n include: {",
"score": 0.7341573238372803
},
{
"filename": "src/server/api/routers/message.ts",
"retrieved_chunk": " character: true,\n },\n take: 6,\n orderBy: {\n createdAt: \"desc\",\n },\n });\n }),\n deleteAll: protectedProcedure.mutation(({ ctx }) => {\n return ctx.prisma.message.deleteMany({",
"score": 0.7262527942657471
}
] | typescript | EMAIL_SERVER_HOST,
port: env.EMAIL_SERVER_PORT,
auth: { |
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
import { z } from "zod";
import { createOpenAICompletion } from "~/external/openai/chatGPTApi";
import { ChatGPTMessage } from "~/external/openai/chatGPTMessage";
import { parseActionCode, stringifyActionCode } from "~/external/openai/chatGPTActionItems";
export const messageRouter = createTRPCRouter({
create: protectedProcedure
.input(
z.object({
content: z.string().min(1).max(200),
})
)
.mutation(({ input, ctx }) => {
return ctx.prisma.message.create({
data: {
content: input.content,
authorId: ctx.session.user.id,
},
});
}),
generateGPT: protectedProcedure.mutation(async ({ ctx }) => {
const todoList = await ctx.prisma.todo.findMany({
where: {
authorId: ctx.session.user.id,
},
});
const lastNMessages = await ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
orderBy: {
createdAt: "desc",
},
take: 5,
include: {
character: true,
},
});
const character = await ctx.prisma.user.findUnique({
where: {
id: ctx.session.user.id,
},
}).activeCharacter();
const chatGptResponse = await createOpenAICompletion(
{
type: "assistant",
characterDescription: character?.content ?? "The depressed robot from Hitchhiker's Guide to the Galaxy",
characterName: character?.name ?? "Marvin",
exampleConverstationStart: character?.exampleConverstationStart ?? "Here I am, brain the size of a planet, and this is what they ask me to do",
actions: []
},
todoList,
lastNMessages.reverse().map((message) => {
if (message.isGPT) {
return {
type: "assistant",
characterDescription: message.character?.content,
characterName: message.character?.name,
actions: parseActionCode(message.content),
| } as ChatGPTMessage; |
}
return {
type: "user",
content: message.content,
} as ChatGPTMessage;
}),
);
for (const action of chatGptResponse.actions) {
if (action.type === "add") {
await ctx.prisma.todo.create({
data: {
title: action.content,
due: action.due,
authorId: ctx.session.user.id,
},
});
}
if (action.type === "complete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: true,
},
});
}
if (action.type === "delete") {
await ctx.prisma.todo.delete({
where: {
id: action.id,
},
});
}
if (action.type === "uncomplete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: false,
},
});
}
}
return ctx.prisma.message.create({
data: {
content: stringifyActionCode(chatGptResponse.actions),
authorId: ctx.session.user.id,
isGPT: true,
characterId: character?.id,
},
});
}),
findAll: protectedProcedure.query(({ ctx }) => {
return ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
include: {
character: true,
},
take: 6,
orderBy: {
createdAt: "desc",
},
});
}),
deleteAll: protectedProcedure.mutation(({ ctx }) => {
return ctx.prisma.message.deleteMany({
where: {
authorId: ctx.session.user.id,
},
});
}),
});
| src/server/api/routers/message.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " content: message.type === \"assistant\" ? stringifyActionCode(message.actions) : message.content,\n role: message.type === \"assistant\" ? ChatCompletionRequestMessageRoleEnum.Assistant : ChatCompletionRequestMessageRoleEnum.User as ChatCompletionRequestMessageRoleEnum,\n };\n });\n messages = [{\n content: system,\n role: ChatCompletionRequestMessageRoleEnum.System,\n }, ...messages];\n // Run some checks to prevent abuse\n if (messages.length >= 7) {",
"score": 0.819688081741333
},
{
"filename": "src/external/openai/chatGPTMessage.ts",
"retrieved_chunk": "import { ChatGPTActionItems } from \"./chatGPTActionItems\";\nexport type ChatGPTCharacter = {\n type: \"assistant\",\n characterDescription: string,\n characterName: string,\n exampleConverstationStart: string,\n actions: ChatGPTActionItems[],\n}\nexport type ChatGPTUser = {\n type: \"user\",",
"score": 0.804608941078186
},
{
"filename": "src/external/openai/chatGPTActionItems.ts",
"retrieved_chunk": "};\nexport type ChatGPTActionItems = ChatGPTAction | ChatGPTActionComplete | ChatGPTActionPrint | ChatGPTActionDelete | ChatGPTActionUncomplete;\nexport function stringifyActionCode(actions: ChatGPTActionItems[]): string {\n return actions.map((action) => {\n switch (action.type) {\n case \"add\":\n return `ADD(${action.due.toDateString()}, \"${action.content}\")`;\n case \"complete\":\n return `COMPLETE(${action.id})`;\n case \"delete\":",
"score": 0.7933003902435303
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " if (todoList.find((todo) => todo.id === action.id) === undefined) {\n throw new Error(`Invalid todo id ${action.id}`);\n }\n }\n }\n return {\n type: \"assistant\",\n characterName: currentCharacter.characterName,\n characterDescription: currentCharacter.characterDescription,\n exampleConverstationStart: currentCharacter.exampleConverstationStart,",
"score": 0.7776786684989929
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " apiKey: env.OPENAI_API_KEY,\n});\nconst openai = new OpenAIApi(configuration);\nexport async function listModels() {\n const response = await openai.listModels();\n const models = response.data.data.map((model) => model.id);\n return models;\n}\nexport async function createOpenAICompletion(currentCharacter: ChatGPTCharacter, todoList: ChatGPTTodo[], chatHistory: ChatGPTMessage[]): Promise<ChatGPTCharacter> {\n const exampleTodoItem = todoList.length > 0 ? todoList[0]?.title ?? \"do something\" : \"do something\";",
"score": 0.76532381772995
}
] | typescript | } as ChatGPTMessage; |
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
import { z } from "zod";
import { createOpenAICompletion } from "~/external/openai/chatGPTApi";
import { ChatGPTMessage } from "~/external/openai/chatGPTMessage";
import { parseActionCode, stringifyActionCode } from "~/external/openai/chatGPTActionItems";
export const messageRouter = createTRPCRouter({
create: protectedProcedure
.input(
z.object({
content: z.string().min(1).max(200),
})
)
.mutation(({ input, ctx }) => {
return ctx.prisma.message.create({
data: {
content: input.content,
authorId: ctx.session.user.id,
},
});
}),
generateGPT: protectedProcedure.mutation(async ({ ctx }) => {
const todoList = await ctx.prisma.todo.findMany({
where: {
authorId: ctx.session.user.id,
},
});
const lastNMessages = await ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
orderBy: {
createdAt: "desc",
},
take: 5,
include: {
character: true,
},
});
const character = await ctx.prisma.user.findUnique({
where: {
id: ctx.session.user.id,
},
}).activeCharacter();
const chatGptResponse = await createOpenAICompletion(
{
type: "assistant",
characterDescription: character?.content ?? "The depressed robot from Hitchhiker's Guide to the Galaxy",
characterName: character?.name ?? "Marvin",
exampleConverstationStart: character?.exampleConverstationStart ?? "Here I am, brain the size of a planet, and this is what they ask me to do",
actions: []
},
todoList,
lastNMessages.reverse().map((message) => {
if (message.isGPT) {
return {
type: "assistant",
characterDescription: message.character?.content,
characterName: message.character?.name,
actions: | parseActionCode(message.content),
} as ChatGPTMessage; |
}
return {
type: "user",
content: message.content,
} as ChatGPTMessage;
}),
);
for (const action of chatGptResponse.actions) {
if (action.type === "add") {
await ctx.prisma.todo.create({
data: {
title: action.content,
due: action.due,
authorId: ctx.session.user.id,
},
});
}
if (action.type === "complete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: true,
},
});
}
if (action.type === "delete") {
await ctx.prisma.todo.delete({
where: {
id: action.id,
},
});
}
if (action.type === "uncomplete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: false,
},
});
}
}
return ctx.prisma.message.create({
data: {
content: stringifyActionCode(chatGptResponse.actions),
authorId: ctx.session.user.id,
isGPT: true,
characterId: character?.id,
},
});
}),
findAll: protectedProcedure.query(({ ctx }) => {
return ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
include: {
character: true,
},
take: 6,
orderBy: {
createdAt: "desc",
},
});
}),
deleteAll: protectedProcedure.mutation(({ ctx }) => {
return ctx.prisma.message.deleteMany({
where: {
authorId: ctx.session.user.id,
},
});
}),
});
| src/server/api/routers/message.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " content: message.type === \"assistant\" ? stringifyActionCode(message.actions) : message.content,\n role: message.type === \"assistant\" ? ChatCompletionRequestMessageRoleEnum.Assistant : ChatCompletionRequestMessageRoleEnum.User as ChatCompletionRequestMessageRoleEnum,\n };\n });\n messages = [{\n content: system,\n role: ChatCompletionRequestMessageRoleEnum.System,\n }, ...messages];\n // Run some checks to prevent abuse\n if (messages.length >= 7) {",
"score": 0.8171473741531372
},
{
"filename": "src/external/openai/chatGPTMessage.ts",
"retrieved_chunk": "import { ChatGPTActionItems } from \"./chatGPTActionItems\";\nexport type ChatGPTCharacter = {\n type: \"assistant\",\n characterDescription: string,\n characterName: string,\n exampleConverstationStart: string,\n actions: ChatGPTActionItems[],\n}\nexport type ChatGPTUser = {\n type: \"user\",",
"score": 0.8056856393814087
},
{
"filename": "src/external/openai/chatGPTActionItems.ts",
"retrieved_chunk": "};\nexport type ChatGPTActionItems = ChatGPTAction | ChatGPTActionComplete | ChatGPTActionPrint | ChatGPTActionDelete | ChatGPTActionUncomplete;\nexport function stringifyActionCode(actions: ChatGPTActionItems[]): string {\n return actions.map((action) => {\n switch (action.type) {\n case \"add\":\n return `ADD(${action.due.toDateString()}, \"${action.content}\")`;\n case \"complete\":\n return `COMPLETE(${action.id})`;\n case \"delete\":",
"score": 0.7888163328170776
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " console.log(messages)\n const response = await openai.createChatCompletion({\n model: \"gpt-4\",\n messages: messages,\n })\n for (const choices of response?.data?.choices) {\n console.log(choices);\n }\n const completion = response?.data?.choices[0]?.message?.content;\n if (completion === undefined) {",
"score": 0.7578976154327393
},
{
"filename": "src/external/openai/chatGPTActionItems.ts",
"retrieved_chunk": "export type ChatGPTAction = {\n type: \"add\",\n due: Date,\n content: string\n};\nexport type ChatGPTActionComplete = {\n type: \"complete\",\n id: string\n};\nexport type ChatGPTActionDelete = {",
"score": 0.751667320728302
}
] | typescript | parseActionCode(message.content),
} as ChatGPTMessage; |
import { TextInput } from "~/components/basic/TextInput";
import { useEffect, useRef, useState } from "react";
import { api } from "~/utils/api";
import { toast } from "react-toastify";
import { Message } from "~/components/chat/Message";
export function ChatBox() {
const [message, setMessage] = useState("");
const context = api.useContext();
const messages = api.message.findAll.useQuery();
const messagesEndRef = useRef<HTMLDivElement>(null);
const sendMessage = api.message.create.useMutation({
onSuccess: () => {
void context.message.invalidate();
setMessage("");
},
onError: (err) => {
toast.error(err.message);
},
});
const requestGPTResponse = api.message.generateGPT.useMutation({
onSuccess: () => {
void context.message.invalidate();
void context.todo.invalidate();
},
onError: (err) => {
toast.error(err.message);
},
});
const deleteMessage = api.message.deleteAll.useMutation({
onSuccess: async () => {
await context.message.invalidate();
},
onError: (err) => {
toast.error(err.message);
},
});
const clearChatHandler = (e: React.MouseEvent<HTMLButtonElement>) => {
e.preventDefault();
void toast.promise(
deleteMessage.mutateAsync(),
{
pending: "Loading...",
}
);
};
const onSubmit = (e: React.FormEvent<HTMLFormElement>) => {
e.preventDefault();
void sendMessage.mutateAsync({ content: message }).then(() => {
void toast.promise(requestGPTResponse.mutateAsync(), {
pending: "Thinking...",
});
});
};
const scrollToBottom = () => {
messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
};
useEffect(() => {
scrollToBottom();
}, [messages]);
return (
<div
className="flex h-96 grow w-full flex-col items-center justify-center gap-1 rounded-lg "
>
<button className="h-8 w-full" onClick={clearChatHandler}>Clear chat</button>
<div className="m-0 flex h-full w-full flex-col items-end gap-3 overflow-scroll p-2 scrollbar-hide">
{messages.data?.slice(0).reverse().map((message, index) => (
| <Message message={message} key={index} />
))} |
<div className="h-0 w-0" ref={messagesEndRef} />
</div>
<form className="flex w-full" onSubmit={onSubmit}>
<TextInput placeholder="Message" value={message} setValue={setMessage} />
<button className="h-8 w-20" type="submit">Send</button>
</form>
</div>
);
}
| src/components/chat/ChatBox.tsx | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/components/chat/Message.tsx",
"retrieved_chunk": " }, [message]);\n return (\n <div className=\"flex w-full flex-row\">\n {!message.isGPT && <div className=\"w-2/6\" />}\n <div\n className={\n \"w-full rounded-2xl bg-white p-2 \" +\n (message.isGPT ? \" rounded-bl-none\" : \"rounded-br-none\")\n }\n >",
"score": 0.8907173275947571
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " </h1>\n {sessionData &&\n <div className=\"flex h-full w-full flex-col gap-5 md:h-128 md:flex-row-reverse\">\n <TodoBox />\n <SelectPageWrapper />\n </div>\n }\n <div className=\"flex items-center\">\n <AuthShowcase />\n </div>",
"score": 0.8542909622192383
},
{
"filename": "src/components/todo/TodoBox.tsx",
"retrieved_chunk": "import { api } from \"~/utils/api\";\nimport Todo from \"~/components/todo/Todo\";\nexport function TodoBox() {\n const todos = api.todo.findAll.useQuery();\n return (\n <div className=\"flex w-full flex-col gap-2\">\n <div className=\"flex flex-col gap-3 overflow-scroll rounded scrollbar-hide\">\n {todos.data?.map((todo, index) => (\n <Todo todo={todo} key={index} />\n ))}",
"score": 0.8405461311340332
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " <Head>\n <title>TodoGPT</title>\n <meta name=\"description\" content=\"Smartest Todo app on the market\" />\n <link rel=\"icon\" href=\"/favicon.ico\" />\n </Head>\n <main className=\"flex min-h-screen flex-col items-center justify-center bg-gray-900\">\n <div className=\"container flex flex-col items-center justify-center gap-12 px-4 py-16 \">\n <h1 className=\"text-5xl font-extrabold tracking-tight text-white sm:text-[5rem]\">\n The worlds smartest{\" \"}\n <span className=\"text-green-500\">Todo</span> App",
"score": 0.8182191848754883
},
{
"filename": "src/components/todo/Todo.tsx",
"retrieved_chunk": " <div className=\"flex flex-col items-center justify-center gap-4 rounded-lg bg-white/10 p-4\">\n <div className=\"flex w-full items-center gap-3\">\n <input\n onClick={onClick}\n type=\"checkbox\"\n checked={done}\n className=\"h-4 w-4 rounded border-gray-300 bg-gray-100 text-blue-600 focus:ring-2 focus:ring-blue-500 dark:border-gray-600 dark:bg-gray-700 dark:ring-offset-gray-800 dark:focus:ring-blue-600\"\n />\n <p className=\"text-2xl text-white\">{title}</p>\n {due && <p className=\"text-gray-300\">{due.toDateString()}</p>}",
"score": 0.7972321510314941
}
] | typescript | <Message message={message} key={index} />
))} |
import { type GetServerSidePropsContext } from "next";
import {
getServerSession,
type NextAuthOptions,
type DefaultSession,
} from "next-auth";
import GoogleProvider from "next-auth/providers/google";
import EmailProvider from "next-auth/providers/email";
import { PrismaAdapter } from "@next-auth/prisma-adapter";
import { env } from "~/env.mjs";
import { prisma } from "~/server/db";
/**
* Module augmentation for `next-auth` types. Allows us to add custom properties to the `session`
* object and keep type safety.
*
* @see https://next-auth.js.org/getting-started/typescript#module-augmentation
*/
declare module "next-auth" {
interface Session extends DefaultSession {
user: {
id: string;
// ...other properties
// role: UserRole;
} & DefaultSession["user"];
}
// interface User {
// // ...other properties
// // role: UserRole;
// }
}
/**
* Options for NextAuth.js used to configure adapters, providers, callbacks, etc.
*
* @see https://next-auth.js.org/configuration/options
*/
export const authOptions: NextAuthOptions = {
callbacks: {
session: ({ session, user }) => ({
...session,
user: {
...session.user,
id: user.id,
},
}),
},
adapter: PrismaAdapter(prisma),
providers: [
EmailProvider({
server: {
host: env.EMAIL_SERVER_HOST,
port: env.EMAIL_SERVER_PORT,
auth: {
user: env.EMAIL_SERVER_USER,
| pass: env.EMAIL_SERVER_PASSWORD
} |
},
from: env.EMAIL_FROM
}),
GoogleProvider({
clientId: env.GOOGLE_CLIENT_ID,
clientSecret: env.GOOGLE_CLIENT_SECRET,
}),
/**
* ...add more providers here.
*
* Most other providers require a bit more work than the Discord provider. For example, the
* GitHub provider requires you to add the `refresh_token_expires_in` field to the Account
* model. Refer to the NextAuth.js docs for the provider you want to use. Example:
*
* @see https://next-auth.js.org/providers/github
*/
]
};
/**
* Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.
*
* @see https://next-auth.js.org/configuration/nextjs
*/
export const getServerAuthSession = (ctx: {
req: GetServerSidePropsContext["req"];
res: GetServerSidePropsContext["res"];
}) => {
return getServerSession(ctx.req, ctx.res, authOptions);
};
| src/server/auth.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/utils/api.ts",
"retrieved_chunk": "import { type AppRouter } from \"~/server/api/root\";\nconst getBaseUrl = () => {\n if (typeof window !== \"undefined\") return \"\"; // browser should use relative url\n if (process.env.VERCEL_URL) return `https://${process.env.VERCEL_URL}`; // SSR should use vercel url\n return `http://localhost:${process.env.PORT ?? 3000}`; // dev SSR should use localhost\n};\n/** A set of type-safe react-query hooks for your tRPC API. */\nexport const api = createTRPCNext<AppRouter>({\n config() {\n return {",
"score": 0.7016719579696655
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": " */\n links: [\n loggerLink({\n enabled: (opts) =>\n process.env.NODE_ENV === \"development\" ||\n (opts.direction === \"down\" && opts.result instanceof Error),\n }),\n httpBatchLink({\n url: `${getBaseUrl()}/api/trpc`,\n }),",
"score": 0.6868129968643188
},
{
"filename": "src/server/api/trpc.ts",
"retrieved_chunk": " return next({\n ctx: {\n // infers the `session` as non-nullable\n session: { ...ctx.session, user: ctx.session.user },\n },\n });\n});\n/**\n * Protected (authenticated) procedure\n *",
"score": 0.6831967830657959
},
{
"filename": "src/server/api/routers/me.ts",
"retrieved_chunk": "import { createTRPCRouter, protectedProcedure } from \"~/server/api/trpc\";\nexport const meRouter = createTRPCRouter({\n getMe: protectedProcedure.query(({ ctx }) => {\n return ctx.prisma.user.findUnique({\n where: {\n id: ctx.session.user.id,\n },\n include: {\n activeCharacter: true,\n },",
"score": 0.6723310947418213
},
{
"filename": "src/server/api/routers/message.ts",
"retrieved_chunk": " characterId: character?.id,\n },\n });\n }),\n findAll: protectedProcedure.query(({ ctx }) => {\n return ctx.prisma.message.findMany({\n where: {\n authorId: ctx.session.user.id,\n },\n include: {",
"score": 0.6695108413696289
}
] | typescript | pass: env.EMAIL_SERVER_PASSWORD
} |
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
import { z } from "zod";
import { createOpenAICompletion } from "~/external/openai/chatGPTApi";
import { ChatGPTMessage } from "~/external/openai/chatGPTMessage";
import { parseActionCode, stringifyActionCode } from "~/external/openai/chatGPTActionItems";
export const messageRouter = createTRPCRouter({
create: protectedProcedure
.input(
z.object({
content: z.string().min(1).max(200),
})
)
.mutation(({ input, ctx }) => {
return ctx.prisma.message.create({
data: {
content: input.content,
authorId: ctx.session.user.id,
},
});
}),
generateGPT: protectedProcedure.mutation(async ({ ctx }) => {
const todoList = await ctx.prisma.todo.findMany({
where: {
authorId: ctx.session.user.id,
},
});
const lastNMessages = await ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
orderBy: {
createdAt: "desc",
},
take: 5,
include: {
character: true,
},
});
const character = await ctx.prisma.user.findUnique({
where: {
id: ctx.session.user.id,
},
}).activeCharacter();
const chatGptResponse = await createOpenAICompletion(
{
type: "assistant",
characterDescription: character?.content ?? "The depressed robot from Hitchhiker's Guide to the Galaxy",
characterName: character?.name ?? "Marvin",
exampleConverstationStart: character?.exampleConverstationStart ?? "Here I am, brain the size of a planet, and this is what they ask me to do",
actions: []
},
todoList,
lastNMessages.reverse().map((message) => {
if (message.isGPT) {
return {
type: "assistant",
characterDescription: message.character?.content,
characterName: message.character?.name,
| actions: parseActionCode(message.content),
} as ChatGPTMessage; |
}
return {
type: "user",
content: message.content,
} as ChatGPTMessage;
}),
);
for (const action of chatGptResponse.actions) {
if (action.type === "add") {
await ctx.prisma.todo.create({
data: {
title: action.content,
due: action.due,
authorId: ctx.session.user.id,
},
});
}
if (action.type === "complete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: true,
},
});
}
if (action.type === "delete") {
await ctx.prisma.todo.delete({
where: {
id: action.id,
},
});
}
if (action.type === "uncomplete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: false,
},
});
}
}
return ctx.prisma.message.create({
data: {
content: stringifyActionCode(chatGptResponse.actions),
authorId: ctx.session.user.id,
isGPT: true,
characterId: character?.id,
},
});
}),
findAll: protectedProcedure.query(({ ctx }) => {
return ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
include: {
character: true,
},
take: 6,
orderBy: {
createdAt: "desc",
},
});
}),
deleteAll: protectedProcedure.mutation(({ ctx }) => {
return ctx.prisma.message.deleteMany({
where: {
authorId: ctx.session.user.id,
},
});
}),
});
| src/server/api/routers/message.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " content: message.type === \"assistant\" ? stringifyActionCode(message.actions) : message.content,\n role: message.type === \"assistant\" ? ChatCompletionRequestMessageRoleEnum.Assistant : ChatCompletionRequestMessageRoleEnum.User as ChatCompletionRequestMessageRoleEnum,\n };\n });\n messages = [{\n content: system,\n role: ChatCompletionRequestMessageRoleEnum.System,\n }, ...messages];\n // Run some checks to prevent abuse\n if (messages.length >= 7) {",
"score": 0.819688081741333
},
{
"filename": "src/external/openai/chatGPTMessage.ts",
"retrieved_chunk": "import { ChatGPTActionItems } from \"./chatGPTActionItems\";\nexport type ChatGPTCharacter = {\n type: \"assistant\",\n characterDescription: string,\n characterName: string,\n exampleConverstationStart: string,\n actions: ChatGPTActionItems[],\n}\nexport type ChatGPTUser = {\n type: \"user\",",
"score": 0.804608941078186
},
{
"filename": "src/external/openai/chatGPTActionItems.ts",
"retrieved_chunk": "};\nexport type ChatGPTActionItems = ChatGPTAction | ChatGPTActionComplete | ChatGPTActionPrint | ChatGPTActionDelete | ChatGPTActionUncomplete;\nexport function stringifyActionCode(actions: ChatGPTActionItems[]): string {\n return actions.map((action) => {\n switch (action.type) {\n case \"add\":\n return `ADD(${action.due.toDateString()}, \"${action.content}\")`;\n case \"complete\":\n return `COMPLETE(${action.id})`;\n case \"delete\":",
"score": 0.7933003902435303
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " if (todoList.find((todo) => todo.id === action.id) === undefined) {\n throw new Error(`Invalid todo id ${action.id}`);\n }\n }\n }\n return {\n type: \"assistant\",\n characterName: currentCharacter.characterName,\n characterDescription: currentCharacter.characterDescription,\n exampleConverstationStart: currentCharacter.exampleConverstationStart,",
"score": 0.7776786684989929
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " apiKey: env.OPENAI_API_KEY,\n});\nconst openai = new OpenAIApi(configuration);\nexport async function listModels() {\n const response = await openai.listModels();\n const models = response.data.data.map((model) => model.id);\n return models;\n}\nexport async function createOpenAICompletion(currentCharacter: ChatGPTCharacter, todoList: ChatGPTTodo[], chatHistory: ChatGPTMessage[]): Promise<ChatGPTCharacter> {\n const exampleTodoItem = todoList.length > 0 ? todoList[0]?.title ?? \"do something\" : \"do something\";",
"score": 0.76532381772995
}
] | typescript | actions: parseActionCode(message.content),
} as ChatGPTMessage; |
/**
* This is the client-side entrypoint for your tRPC API. It is used to create the `api` object which
* contains the Next.js App-wrapper, as well as your type-safe React Query hooks.
*
* We also create a few inference helpers for input and output types.
*/
import { httpBatchLink, loggerLink } from "@trpc/client";
import { createTRPCNext } from "@trpc/next";
import { type inferRouterInputs, type inferRouterOutputs } from "@trpc/server";
import superjson from "superjson";
import { type AppRouter } from "~/server/api/root";
const getBaseUrl = () => {
if (typeof window !== "undefined") return ""; // browser should use relative url
if (process.env.VERCEL_URL) return `https://${process.env.VERCEL_URL}`; // SSR should use vercel url
return `http://localhost:${process.env.PORT ?? 3000}`; // dev SSR should use localhost
};
/** A set of type-safe react-query hooks for your tRPC API. */
export const api = createTRPCNext<AppRouter>({
config() {
return {
/**
* Transformer used for data de-serialization from the server.
*
* @see https://trpc.io/docs/data-transformers
*/
transformer: superjson,
/**
* Links used to determine request flow from client to server.
*
* @see https://trpc.io/docs/links
*/
links: [
loggerLink({
enabled: (opts) =>
process.env.NODE_ENV === "development" ||
(opts.direction === "down" && opts.result instanceof Error),
}),
httpBatchLink({
url: `${getBaseUrl()}/api/trpc`,
}),
],
};
},
/**
* Whether tRPC should await queries when server rendering pages.
*
* @see https://trpc.io/docs/nextjs#ssr-boolean-default-false
*/
ssr: false,
});
/**
* Inference helper for inputs.
*
* @example type HelloInput = RouterInputs['example']['hello']
*/
export type RouterInputs = inferRouterInputs | <AppRouter>; |
/**
* Inference helper for outputs.
*
* @example type HelloOutput = RouterOutputs['example']['hello']
*/
export type RouterOutputs = inferRouterOutputs<AppRouter>;
| src/utils/api.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/server/api/trpc.ts",
"retrieved_chunk": " */\n/**\n * This is how you create new routers and sub-routers in your tRPC API.\n *\n * @see https://trpc.io/docs/router\n */\nexport const createTRPCRouter = t.router;\n/**\n * Public (unauthenticated) procedure\n *",
"score": 0.7743401527404785
},
{
"filename": "src/server/auth.ts",
"retrieved_chunk": " */\n ]\n};\n/**\n * Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.\n *\n * @see https://next-auth.js.org/configuration/nextjs\n */\nexport const getServerAuthSession = (ctx: {\n req: GetServerSidePropsContext[\"req\"];",
"score": 0.7663018703460693
},
{
"filename": "src/server/auth.ts",
"retrieved_chunk": "/**\n * Options for NextAuth.js used to configure adapters, providers, callbacks, etc.\n *\n * @see https://next-auth.js.org/configuration/options\n */\nexport const authOptions: NextAuthOptions = {\n callbacks: {\n session: ({ session, user }) => ({\n ...session,\n user: {",
"score": 0.7604047656059265
},
{
"filename": "src/server/api/root.ts",
"retrieved_chunk": "export const appRouter = createTRPCRouter({\n todo: todoRouter,\n message: messageRouter,\n character: characterRouter,\n me: meRouter,\n});\n// export type definition of API\nexport type AppRouter = typeof appRouter;",
"score": 0.7571391463279724
},
{
"filename": "src/server/auth.ts",
"retrieved_chunk": "import { prisma } from \"~/server/db\";\n/**\n * Module augmentation for `next-auth` types. Allows us to add custom properties to the `session`\n * object and keep type safety.\n *\n * @see https://next-auth.js.org/getting-started/typescript#module-augmentation\n */\ndeclare module \"next-auth\" {\n interface Session extends DefaultSession {\n user: {",
"score": 0.7455682754516602
}
] | typescript | <AppRouter>; |
import { type GetServerSidePropsContext } from "next";
import {
getServerSession,
type NextAuthOptions,
type DefaultSession,
} from "next-auth";
import GoogleProvider from "next-auth/providers/google";
import EmailProvider from "next-auth/providers/email";
import { PrismaAdapter } from "@next-auth/prisma-adapter";
import { env } from "~/env.mjs";
import { prisma } from "~/server/db";
/**
* Module augmentation for `next-auth` types. Allows us to add custom properties to the `session`
* object and keep type safety.
*
* @see https://next-auth.js.org/getting-started/typescript#module-augmentation
*/
declare module "next-auth" {
interface Session extends DefaultSession {
user: {
id: string;
// ...other properties
// role: UserRole;
} & DefaultSession["user"];
}
// interface User {
// // ...other properties
// // role: UserRole;
// }
}
/**
* Options for NextAuth.js used to configure adapters, providers, callbacks, etc.
*
* @see https://next-auth.js.org/configuration/options
*/
export const authOptions: NextAuthOptions = {
callbacks: {
session: ({ session, user }) => ({
...session,
user: {
...session.user,
id: user.id,
},
}),
},
adapter: PrismaAdapter(prisma),
providers: [
EmailProvider({
server: {
host: env.EMAIL_SERVER_HOST,
port: env.EMAIL_SERVER_PORT,
auth: {
user: env | .EMAIL_SERVER_USER,
pass: env.EMAIL_SERVER_PASSWORD
} |
},
from: env.EMAIL_FROM
}),
GoogleProvider({
clientId: env.GOOGLE_CLIENT_ID,
clientSecret: env.GOOGLE_CLIENT_SECRET,
}),
/**
* ...add more providers here.
*
* Most other providers require a bit more work than the Discord provider. For example, the
* GitHub provider requires you to add the `refresh_token_expires_in` field to the Account
* model. Refer to the NextAuth.js docs for the provider you want to use. Example:
*
* @see https://next-auth.js.org/providers/github
*/
]
};
/**
* Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.
*
* @see https://next-auth.js.org/configuration/nextjs
*/
export const getServerAuthSession = (ctx: {
req: GetServerSidePropsContext["req"];
res: GetServerSidePropsContext["res"];
}) => {
return getServerSession(ctx.req, ctx.res, authOptions);
};
| src/server/auth.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/utils/api.ts",
"retrieved_chunk": "import { type AppRouter } from \"~/server/api/root\";\nconst getBaseUrl = () => {\n if (typeof window !== \"undefined\") return \"\"; // browser should use relative url\n if (process.env.VERCEL_URL) return `https://${process.env.VERCEL_URL}`; // SSR should use vercel url\n return `http://localhost:${process.env.PORT ?? 3000}`; // dev SSR should use localhost\n};\n/** A set of type-safe react-query hooks for your tRPC API. */\nexport const api = createTRPCNext<AppRouter>({\n config() {\n return {",
"score": 0.6719832420349121
},
{
"filename": "src/server/api/trpc.ts",
"retrieved_chunk": " return next({\n ctx: {\n // infers the `session` as non-nullable\n session: { ...ctx.session, user: ctx.session.user },\n },\n });\n});\n/**\n * Protected (authenticated) procedure\n *",
"score": 0.657913327217102
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": " */\n links: [\n loggerLink({\n enabled: (opts) =>\n process.env.NODE_ENV === \"development\" ||\n (opts.direction === \"down\" && opts.result instanceof Error),\n }),\n httpBatchLink({\n url: `${getBaseUrl()}/api/trpc`,\n }),",
"score": 0.6543761491775513
},
{
"filename": "src/server/api/routers/me.ts",
"retrieved_chunk": "import { createTRPCRouter, protectedProcedure } from \"~/server/api/trpc\";\nexport const meRouter = createTRPCRouter({\n getMe: protectedProcedure.query(({ ctx }) => {\n return ctx.prisma.user.findUnique({\n where: {\n id: ctx.session.user.id,\n },\n include: {\n activeCharacter: true,\n },",
"score": 0.6295307278633118
},
{
"filename": "src/server/api/routers/message.ts",
"retrieved_chunk": " characterId: character?.id,\n },\n });\n }),\n findAll: protectedProcedure.query(({ ctx }) => {\n return ctx.prisma.message.findMany({\n where: {\n authorId: ctx.session.user.id,\n },\n include: {",
"score": 0.6273375749588013
}
] | typescript | .EMAIL_SERVER_USER,
pass: env.EMAIL_SERVER_PASSWORD
} |
import { type GetServerSidePropsContext } from "next";
import {
getServerSession,
type NextAuthOptions,
type DefaultSession,
} from "next-auth";
import GoogleProvider from "next-auth/providers/google";
import EmailProvider from "next-auth/providers/email";
import { PrismaAdapter } from "@next-auth/prisma-adapter";
import { env } from "~/env.mjs";
import { prisma } from "~/server/db";
/**
* Module augmentation for `next-auth` types. Allows us to add custom properties to the `session`
* object and keep type safety.
*
* @see https://next-auth.js.org/getting-started/typescript#module-augmentation
*/
declare module "next-auth" {
interface Session extends DefaultSession {
user: {
id: string;
// ...other properties
// role: UserRole;
} & DefaultSession["user"];
}
// interface User {
// // ...other properties
// // role: UserRole;
// }
}
/**
* Options for NextAuth.js used to configure adapters, providers, callbacks, etc.
*
* @see https://next-auth.js.org/configuration/options
*/
export const authOptions: NextAuthOptions = {
callbacks: {
session: ({ session, user }) => ({
...session,
user: {
...session.user,
id: user.id,
},
}),
},
adapter: PrismaAdapter(prisma),
providers: [
EmailProvider({
server: {
host: env.EMAIL_SERVER_HOST,
| port: env.EMAIL_SERVER_PORT,
auth: { |
user: env.EMAIL_SERVER_USER,
pass: env.EMAIL_SERVER_PASSWORD
}
},
from: env.EMAIL_FROM
}),
GoogleProvider({
clientId: env.GOOGLE_CLIENT_ID,
clientSecret: env.GOOGLE_CLIENT_SECRET,
}),
/**
* ...add more providers here.
*
* Most other providers require a bit more work than the Discord provider. For example, the
* GitHub provider requires you to add the `refresh_token_expires_in` field to the Account
* model. Refer to the NextAuth.js docs for the provider you want to use. Example:
*
* @see https://next-auth.js.org/providers/github
*/
]
};
/**
* Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.
*
* @see https://next-auth.js.org/configuration/nextjs
*/
export const getServerAuthSession = (ctx: {
req: GetServerSidePropsContext["req"];
res: GetServerSidePropsContext["res"];
}) => {
return getServerSession(ctx.req, ctx.res, authOptions);
};
| src/server/auth.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/server/api/trpc.ts",
"retrieved_chunk": " return next({\n ctx: {\n // infers the `session` as non-nullable\n session: { ...ctx.session, user: ctx.session.user },\n },\n });\n});\n/**\n * Protected (authenticated) procedure\n *",
"score": 0.7620663642883301
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": " */\n links: [\n loggerLink({\n enabled: (opts) =>\n process.env.NODE_ENV === \"development\" ||\n (opts.direction === \"down\" && opts.result instanceof Error),\n }),\n httpBatchLink({\n url: `${getBaseUrl()}/api/trpc`,\n }),",
"score": 0.7576133012771606
},
{
"filename": "src/server/api/routers/message.ts",
"retrieved_chunk": " characterId: character?.id,\n },\n });\n }),\n findAll: protectedProcedure.query(({ ctx }) => {\n return ctx.prisma.message.findMany({\n where: {\n authorId: ctx.session.user.id,\n },\n include: {",
"score": 0.7459473609924316
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": "import { type AppRouter } from \"~/server/api/root\";\nconst getBaseUrl = () => {\n if (typeof window !== \"undefined\") return \"\"; // browser should use relative url\n if (process.env.VERCEL_URL) return `https://${process.env.VERCEL_URL}`; // SSR should use vercel url\n return `http://localhost:${process.env.PORT ?? 3000}`; // dev SSR should use localhost\n};\n/** A set of type-safe react-query hooks for your tRPC API. */\nexport const api = createTRPCNext<AppRouter>({\n config() {\n return {",
"score": 0.7413881421089172
},
{
"filename": "src/server/api/routers/message.ts",
"retrieved_chunk": " character: true,\n },\n take: 6,\n orderBy: {\n createdAt: \"desc\",\n },\n });\n }),\n deleteAll: protectedProcedure.mutation(({ ctx }) => {\n return ctx.prisma.message.deleteMany({",
"score": 0.7411268949508667
}
] | typescript | port: env.EMAIL_SERVER_PORT,
auth: { |
import { type GetServerSidePropsContext } from "next";
import {
getServerSession,
type NextAuthOptions,
type DefaultSession,
} from "next-auth";
import GoogleProvider from "next-auth/providers/google";
import EmailProvider from "next-auth/providers/email";
import { PrismaAdapter } from "@next-auth/prisma-adapter";
import { env } from "~/env.mjs";
import { prisma } from "~/server/db";
/**
* Module augmentation for `next-auth` types. Allows us to add custom properties to the `session`
* object and keep type safety.
*
* @see https://next-auth.js.org/getting-started/typescript#module-augmentation
*/
declare module "next-auth" {
interface Session extends DefaultSession {
user: {
id: string;
// ...other properties
// role: UserRole;
} & DefaultSession["user"];
}
// interface User {
// // ...other properties
// // role: UserRole;
// }
}
/**
* Options for NextAuth.js used to configure adapters, providers, callbacks, etc.
*
* @see https://next-auth.js.org/configuration/options
*/
export const authOptions: NextAuthOptions = {
callbacks: {
session: ({ session, user }) => ({
...session,
user: {
...session.user,
id: user.id,
},
}),
},
adapter: PrismaAdapter( | prisma),
providers: [
EmailProvider({ |
server: {
host: env.EMAIL_SERVER_HOST,
port: env.EMAIL_SERVER_PORT,
auth: {
user: env.EMAIL_SERVER_USER,
pass: env.EMAIL_SERVER_PASSWORD
}
},
from: env.EMAIL_FROM
}),
GoogleProvider({
clientId: env.GOOGLE_CLIENT_ID,
clientSecret: env.GOOGLE_CLIENT_SECRET,
}),
/**
* ...add more providers here.
*
* Most other providers require a bit more work than the Discord provider. For example, the
* GitHub provider requires you to add the `refresh_token_expires_in` field to the Account
* model. Refer to the NextAuth.js docs for the provider you want to use. Example:
*
* @see https://next-auth.js.org/providers/github
*/
]
};
/**
* Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.
*
* @see https://next-auth.js.org/configuration/nextjs
*/
export const getServerAuthSession = (ctx: {
req: GetServerSidePropsContext["req"];
res: GetServerSidePropsContext["res"];
}) => {
return getServerSession(ctx.req, ctx.res, authOptions);
};
| src/server/auth.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/server/api/trpc.ts",
"retrieved_chunk": " return next({\n ctx: {\n // infers the `session` as non-nullable\n session: { ...ctx.session, user: ctx.session.user },\n },\n });\n});\n/**\n * Protected (authenticated) procedure\n *",
"score": 0.8487308025360107
},
{
"filename": "src/server/api/routers/message.ts",
"retrieved_chunk": " characterId: character?.id,\n },\n });\n }),\n findAll: protectedProcedure.query(({ ctx }) => {\n return ctx.prisma.message.findMany({\n where: {\n authorId: ctx.session.user.id,\n },\n include: {",
"score": 0.827465295791626
},
{
"filename": "src/server/api/routers/todo.ts",
"retrieved_chunk": " id: input.id,\n },\n data: {\n done: input.done,\n },\n });\n }),\n update: protectedProcedure\n .input(\n z.object({",
"score": 0.8125998973846436
},
{
"filename": "src/server/api/routers/message.ts",
"retrieved_chunk": " where: {\n authorId: ctx.session.user.id,\n },\n });\n }),\n});",
"score": 0.8069552183151245
},
{
"filename": "src/server/api/routers/message.ts",
"retrieved_chunk": " })\n )\n .mutation(({ input, ctx }) => {\n return ctx.prisma.message.create({\n data: {\n content: input.content,\n authorId: ctx.session.user.id,\n },\n });\n }),",
"score": 0.8056035041809082
}
] | typescript | prisma),
providers: [
EmailProvider({ |
/**
* YOU PROBABLY DON'T NEED TO EDIT THIS FILE, UNLESS:
* 1. You want to modify request context (see Part 1).
* 2. You want to create a new middleware or type of procedure (see Part 3).
*
* TL;DR - This is where all the tRPC server stuff is created and plugged in. The pieces you will
* need to use are documented accordingly near the end.
*/
/**
* 1. CONTEXT
*
* This section defines the "contexts" that are available in the backend API.
*
* These allow you to access things when processing a request, like the database, the session, etc.
*/
import { type CreateNextContextOptions } from "@trpc/server/adapters/next";
import { type Session } from "next-auth";
import { getServerAuthSession } from "~/server/auth";
import { prisma } from "~/server/db";
type CreateContextOptions = {
session: Session | null;
};
/**
* This helper generates the "internals" for a tRPC context. If you need to use it, you can export
* it from here.
*
* Examples of things you may need it for:
* - testing, so we don't have to mock Next.js' req/res
* - tRPC's `createSSGHelpers`, where we don't have req/res
*
* @see https://create.t3.gg/en/usage/trpc#-serverapitrpcts
*/
const createInnerTRPCContext = (opts: CreateContextOptions) => {
return {
session: opts.session,
prisma,
};
};
/**
* This is the actual context you will use in your router. It will be used to process every request
* that goes through your tRPC endpoint.
*
* @see https://trpc.io/docs/context
*/
export const createTRPCContext = async (opts: CreateNextContextOptions) => {
const { req, res } = opts;
// Get the session from the server using the getServerSession wrapper function
const session = await | getServerAuthSession({ req, res }); |
return createInnerTRPCContext({
session,
});
};
/**
* 2. INITIALIZATION
*
* This is where the tRPC API is initialized, connecting the context and transformer. We also parse
* ZodErrors so that you get typesafety on the frontend if your procedure fails due to validation
* errors on the backend.
*/
import { initTRPC, TRPCError } from "@trpc/server";
import superjson from "superjson";
import { ZodError } from "zod";
const t = initTRPC.context<typeof createTRPCContext>().create({
transformer: superjson,
errorFormatter({ shape, error }) {
return {
...shape,
data: {
...shape.data,
zodError:
error.cause instanceof ZodError ? error.cause.flatten() : null,
},
};
},
});
/**
* 3. ROUTER & PROCEDURE (THE IMPORTANT BIT)
*
* These are the pieces you use to build your tRPC API. You should import these a lot in the
* "/src/server/api/routers" directory.
*/
/**
* This is how you create new routers and sub-routers in your tRPC API.
*
* @see https://trpc.io/docs/router
*/
export const createTRPCRouter = t.router;
/**
* Public (unauthenticated) procedure
*
* This is the base piece you use to build new queries and mutations on your tRPC API. It does not
* guarantee that a user querying is authorized, but you can still access user session data if they
* are logged in.
*/
export const publicProcedure = t.procedure;
/** Reusable middleware that enforces users are logged in before running the procedure. */
const enforceUserIsAuthed = t.middleware(({ ctx, next }) => {
if (!ctx.session || !ctx.session.user) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
return next({
ctx: {
// infers the `session` as non-nullable
session: { ...ctx.session, user: ctx.session.user },
},
});
});
/**
* Protected (authenticated) procedure
*
* If you want a query or mutation to ONLY be accessible to logged in users, use this. It verifies
* the session is valid and guarantees `ctx.session.user` is not null.
*
* @see https://trpc.io/docs/procedures
*/
export const protectedProcedure = t.procedure.use(enforceUserIsAuthed);
| src/server/api/trpc.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/server/auth.ts",
"retrieved_chunk": "/**\n * Options for NextAuth.js used to configure adapters, providers, callbacks, etc.\n *\n * @see https://next-auth.js.org/configuration/options\n */\nexport const authOptions: NextAuthOptions = {\n callbacks: {\n session: ({ session, user }) => ({\n ...session,\n user: {",
"score": 0.8539905548095703
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": "/**\n * This is the client-side entrypoint for your tRPC API. It is used to create the `api` object which\n * contains the Next.js App-wrapper, as well as your type-safe React Query hooks.\n *\n * We also create a few inference helpers for input and output types.\n */\nimport { httpBatchLink, loggerLink } from \"@trpc/client\";\nimport { createTRPCNext } from \"@trpc/next\";\nimport { type inferRouterInputs, type inferRouterOutputs } from \"@trpc/server\";\nimport superjson from \"superjson\";",
"score": 0.8504119515419006
},
{
"filename": "src/server/auth.ts",
"retrieved_chunk": " clientSecret: env.GOOGLE_CLIENT_SECRET,\n }),\n /**\n * ...add more providers here.\n *\n * Most other providers require a bit more work than the Discord provider. For example, the\n * GitHub provider requires you to add the `refresh_token_expires_in` field to the Account\n * model. Refer to the NextAuth.js docs for the provider you want to use. Example:\n *\n * @see https://next-auth.js.org/providers/github",
"score": 0.8268477916717529
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": " /**\n * Transformer used for data de-serialization from the server.\n *\n * @see https://trpc.io/docs/data-transformers\n */\n transformer: superjson,\n /**\n * Links used to determine request flow from client to server.\n *\n * @see https://trpc.io/docs/links",
"score": 0.8250290155410767
},
{
"filename": "src/server/auth.ts",
"retrieved_chunk": " */\n ]\n};\n/**\n * Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.\n *\n * @see https://next-auth.js.org/configuration/nextjs\n */\nexport const getServerAuthSession = (ctx: {\n req: GetServerSidePropsContext[\"req\"];",
"score": 0.8082379102706909
}
] | typescript | getServerAuthSession({ req, res }); |
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
import { z } from "zod";
import { createOpenAICompletion } from "~/external/openai/chatGPTApi";
import { ChatGPTMessage } from "~/external/openai/chatGPTMessage";
import { parseActionCode, stringifyActionCode } from "~/external/openai/chatGPTActionItems";
export const messageRouter = createTRPCRouter({
create: protectedProcedure
.input(
z.object({
content: z.string().min(1).max(200),
})
)
.mutation(({ input, ctx }) => {
return ctx.prisma.message.create({
data: {
content: input.content,
authorId: ctx.session.user.id,
},
});
}),
generateGPT: protectedProcedure.mutation(async ({ ctx }) => {
const todoList = await ctx.prisma.todo.findMany({
where: {
authorId: ctx.session.user.id,
},
});
const lastNMessages = await ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
orderBy: {
createdAt: "desc",
},
take: 5,
include: {
character: true,
},
});
const character = await ctx.prisma.user.findUnique({
where: {
id: ctx.session.user.id,
},
}).activeCharacter();
| const chatGptResponse = await createOpenAICompletion(
{ |
type: "assistant",
characterDescription: character?.content ?? "The depressed robot from Hitchhiker's Guide to the Galaxy",
characterName: character?.name ?? "Marvin",
exampleConverstationStart: character?.exampleConverstationStart ?? "Here I am, brain the size of a planet, and this is what they ask me to do",
actions: []
},
todoList,
lastNMessages.reverse().map((message) => {
if (message.isGPT) {
return {
type: "assistant",
characterDescription: message.character?.content,
characterName: message.character?.name,
actions: parseActionCode(message.content),
} as ChatGPTMessage;
}
return {
type: "user",
content: message.content,
} as ChatGPTMessage;
}),
);
for (const action of chatGptResponse.actions) {
if (action.type === "add") {
await ctx.prisma.todo.create({
data: {
title: action.content,
due: action.due,
authorId: ctx.session.user.id,
},
});
}
if (action.type === "complete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: true,
},
});
}
if (action.type === "delete") {
await ctx.prisma.todo.delete({
where: {
id: action.id,
},
});
}
if (action.type === "uncomplete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: false,
},
});
}
}
return ctx.prisma.message.create({
data: {
content: stringifyActionCode(chatGptResponse.actions),
authorId: ctx.session.user.id,
isGPT: true,
characterId: character?.id,
},
});
}),
findAll: protectedProcedure.query(({ ctx }) => {
return ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
include: {
character: true,
},
take: 6,
orderBy: {
createdAt: "desc",
},
});
}),
deleteAll: protectedProcedure.mutation(({ ctx }) => {
return ctx.prisma.message.deleteMany({
where: {
authorId: ctx.session.user.id,
},
});
}),
});
| src/server/api/routers/message.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/server/api/routers/character.ts",
"retrieved_chunk": " .mutation(({ input, ctx }) => {\n return ctx.prisma.character.create({\n data: {\n name: input.name,\n content: input.content,\n authorId: ctx.session.user.id,\n },\n });\n }),\n findAll: publicProcedure.query(({ ctx }) => {",
"score": 0.8831483125686646
},
{
"filename": "src/server/api/routers/character.ts",
"retrieved_chunk": " .mutation(({ input, ctx }) => {\n return ctx.prisma.user.update({\n where: {\n id: ctx.session.user.id,\n },\n data: {\n activeCharacterId: input.id,\n },\n });\n }),",
"score": 0.8789678812026978
},
{
"filename": "src/server/api/routers/me.ts",
"retrieved_chunk": "import { createTRPCRouter, protectedProcedure } from \"~/server/api/trpc\";\nexport const meRouter = createTRPCRouter({\n getMe: protectedProcedure.query(({ ctx }) => {\n return ctx.prisma.user.findUnique({\n where: {\n id: ctx.session.user.id,\n },\n include: {\n activeCharacter: true,\n },",
"score": 0.8435908555984497
},
{
"filename": "src/server/api/routers/todo.ts",
"retrieved_chunk": " id: z.string(),\n })\n )\n .mutation(async ({ input, ctx }) => {\n const todo = await ctx.prisma.todo.findFirst({\n where: {\n id: input.id,\n authorId: ctx.session.user.id,\n },\n });",
"score": 0.8432479500770569
},
{
"filename": "src/server/api/routers/character.ts",
"retrieved_chunk": " return ctx.prisma.character.findMany({\n where: {},\n });\n }),\n setActiveCharacter: protectedProcedure\n .input(\n z.object({\n id: z.string().min(1),\n })\n )",
"score": 0.8093651533126831
}
] | typescript | const chatGptResponse = await createOpenAICompletion(
{ |
/**
* @swagger
* components:
* schemas:
* SignupRequest:
* type: object
* required:
* - email
* - password
* - name
* properties:
* name:
* type: string
* description: The user name
* email:
* type: string
* description: The user email address
* password:
* type: string
* description: The user password
* example:
* name: John Doe
* email: [email protected]
* password: password123
* LoginRequest:
* type: object
* required:
* - email
* - password
* properties:
* email:
* type: string
* description: The user email address
* password:
* type: string
* description: The user password
* example:
* email: [email protected]
* password: password123
*/
import express from 'express';
import { transferFund } from '../service';
import { protect } from '../../../middleware';
const router = express.Router();
/**
* @swagger
* /api/v1/account/transfer:
* post:
* tags:
* - Transfer
* summary: Transfer funds between accounts
* security:
* - BearerAuth: []
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* properties:
* fromAccountId:
* type: string
* description: The ID of the account to transfer funds from.
* example: "123456"
* toAccountId:
* type: string
* description: The ID of the account to transfer funds to.
* example: "789012"
* amount:
* type: number
* description: The amount of funds to transfer.
* example: 1000.00
* tag:
* type: string
* description: The tag associated with the transfer.
* example: "Rent payment"
* responses:
* '200':
* description: Successful transfer of funds
* '400':
* description: Invalid request parameters
* '401':
* description: Unauthorized request
*/
router.post(' | /transfer', protect, transferFund); |
export default router;
| src/modules/account/controller/index.ts | walosha-BACKEND_DEV_TESTS-db2fcb4 | [
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * security:\n * - bearerAuth: []\n * responses:\n * \"200\":\n * description: The user profile\n * \"401\":\n * description: Unauthorized\n */\nrouter.post('/me', protect, getMe);\nexport default router;",
"score": 0.9070289134979248
},
{
"filename": "src/modules/auth/controller/users.ts",
"retrieved_chunk": " * description: The ID of the user to delete\n * responses:\n * \"204\":\n * description: User deleted successfully\n * \"401\":\n * description: Unauthorized\n * \"404\":\n * description: User not found\n */\n// A simple case where users can only delete themselves not the admin",
"score": 0.9027256965637207
},
{
"filename": "src/modules/auth/controller/users.ts",
"retrieved_chunk": " * tags: [User]\n * security:\n * - bearerAuth: []\n * responses:\n * \"200\":\n * description: A list of users\n * content:\n * application/json:\n * schema:\n * type: array",
"score": 0.8712995052337646
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * post:\n * summary: Refreshes the access token\n * tags: [Auth]\n * requestBody:\n * required: true\n * content:\n * application/json:\n * schema:\n * type: object\n * required:",
"score": 0.8598259687423706
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * tags: [Auth]\n * requestBody:\n * required: true\n * content:\n * application/json:\n * schema:\n * $ref: '#/components/schemas/SignupRequest'\n * responses:\n * \"200\":\n * description: The created user.",
"score": 0.8544639348983765
}
] | typescript | /transfer', protect, transferFund); |
// for license and copyright look at the repository
import { IPullRequest } from './Interfaces/PullRequestTypes'
import { IReport, IReportConfigurationEntry } from './Interfaces/ReportTypes'
import { tsMarkdown, table, TableEntry, H1Entry, H3Entry, MarkdownEntry } from 'ts-markdown'
import { ConfigurationCategory, ConfigurationCategoryTitleMap } from './Report.Definitions'
export class ReportGenerator {
DescriptionHeaderLabel = 'Description'
ValueHeaderLabel = 'Value'
public Generate(pr: IPullRequest, report: IReport): string {
const header = this.GenerateHeader(pr, report)
const table = this.GenerateMeasureTable(pr, report)
const reportElements = [header, ...table]
return tsMarkdown(reportElements)
}
public GenerateHeader(pr: IPullRequest, report: IReport): H1Entry {
const title = { h1: `${report.Description} (#${pr.id})` }
return title
}
public GetMeasurementEntries(entries: IReportConfigurationEntry[]): IReportConfigurationEntry[] {
if (entries !== undefined && entries !== null && entries.length > 0) {
return entries.filter((entry) => ConfigurationCategory[entry.Info.ConfigurationCategory].endsWith('Measures'))
}
return []
}
public GenerateMeasureTable(pr: IPullRequest, report: IReport): MarkdownEntry[] {
const tables: MarkdownEntry[] = []
const entries = this.GetMeasurementEntries(report.Entries)
const categories = new Set(entries.map((entry) => entry.Info.ConfigurationCategory))
categories.forEach((category) => {
tables.push(this.GenerateCategoryTitle(category))
tables.push(this.GenerateCategoryTable(pr, report, category))
})
return tables
}
private GenerateCategoryTitle(measureCategory: ConfigurationCategory): H3Entry {
const title = { h3: `${ | ConfigurationCategoryTitleMap.get(measureCategory) || 'No category'}` } |
return title
}
private GenerateCategoryTable(pr: IPullRequest, report: IReport, measureCategory: ConfigurationCategory): TableEntry {
const entries = this.GetMeasurementEntries(report.Entries)
const categoryEntries = entries.filter((entry) => entry.Info.ConfigurationCategory === measureCategory)
categoryEntries.forEach((entry) => {
entry.Info.Value = entry.PullRequestCallback(pr)
})
const rows = categoryEntries.map((entry) => ({
Description: entry.Info.Description,
Value: entry.Info.Value,
}))
return table({
columns: [{ name: this.DescriptionHeaderLabel }, { name: this.ValueHeaderLabel }],
rows: rows,
})
}
}
| src/Report.Generation.ts | philips-software-pull-request-report-action-3390d78 | [
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " // get the property value of inputs\n entry.Info.ConfigValue = (configValues as { [key: string]: string | number })[entry.Info.ConfigurationName]\n })\n return measurementEntries\n}\nexport const GetActiveMeasures = (entries: Array<ReportConfigurationEntry>): Array<ReportConfigurationEntry> => {\n return entries.filter((entry) => entry.Info.ConfigValue === 'yes')\n}\nexport const ReportConfigurationTable = new Array<ReportConfigurationEntry>()\nReportConfigurationTable.push(",
"score": 0.7917386293411255
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " GetReviewCount,\n GetCommentCount,\n} from './Report.Functions'\nimport { ConfigurationInputs } from './action.config.type'\nexport const UpdateConfigValues = (\n configValues: ConfigurationInputs,\n measurementEntries: Array<ReportConfigurationEntry>,\n): Array<ReportConfigurationEntry> => {\n // Update measurementEntries with config values from inputs\n measurementEntries.forEach((entry) => {",
"score": 0.7647273540496826
},
{
"filename": "src/Report.Definitions.ts",
"retrieved_chunk": "// for license and copyright look at the repository\nimport { IReport, IReportConfigurationEntry, IReportConfigInfo, PullRequestCallback } from './Interfaces/ReportTypes'\nexport enum ConfigurationCategory {\n None,\n StaticMeasures,\n TimeRelatedMeasures,\n StatusCheckRelatedMeasures,\n ReportGeneratorValue,\n}\nexport const ConfigurationCategoryTitleMap = new Map<ConfigurationCategory, string>([",
"score": 0.7311519980430603
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " 'AddPrReportAsComment',\n 'yes',\n ConfigurationCategory.ReportGeneratorValue,\n ),\n () => 0,\n ),\n)\nReportConfigurationTable.push(\n new ReportConfigurationEntry(\n 'title_string',",
"score": 0.7258577346801758
},
{
"filename": "src/Report.Definitions.ts",
"retrieved_chunk": " ) {\n this.Description = label\n this.PresentationValue = presentationValue\n this.Value = value\n this.ConfigurationName = configName\n this.ConfigValue = defaultConfigValue\n this.ConfigurationCategory = configurationCategory\n }\n}\nexport class ReportConfigurationEntry implements IReportConfigurationEntry {",
"score": 0.7239434719085693
}
] | typescript | ConfigurationCategoryTitleMap.get(measureCategory) || 'No category'}` } |
// for license and copyright look at the repository
import { IReport, IReportConfigurationEntry, IReportConfigInfo, PullRequestCallback } from './Interfaces/ReportTypes'
// Buckets used to group report entries. Names ending in 'Measures' are the
// measurement categories selected by ReportGenerator.GetMeasurementEntries.
export enum ConfigurationCategory {
  None,
  StaticMeasures,
  TimeRelatedMeasures,
  StatusCheckRelatedMeasures,
  ReportGeneratorValue,
}
// Human-readable section titles per ConfigurationCategory, used as H3 headings
// in the generated markdown report (unmapped values fall back to 'No category').
export const ConfigurationCategoryTitleMap = new Map<ConfigurationCategory, string>([
  [ConfigurationCategory.None, 'None'],
  [ConfigurationCategory.StaticMeasures, 'Static measures'],
  [ConfigurationCategory.TimeRelatedMeasures, 'Time related measures'],
  [ConfigurationCategory.StatusCheckRelatedMeasures, 'Status check related measures'],
  [ConfigurationCategory.ReportGeneratorValue, 'Report generator related predefined strings'],
])
/**
 * Concrete IReportConfigInfo: describes one configurable report entry —
 * its display label, current/presentation value, and the action-input binding
 * (configuration name, default value, category).
 */
export class ConfigurationInfo implements IReportConfigInfo {
  // explicit types (matching IReportConfigInfo) replace implicitly-any fields
  public Description: string
  public PresentationValue: string | number
  public Value: string | number
  public ConfigurationName: string
  public ConfigValue: string | number
  public ConfigurationCategory: ConfigurationCategory
  constructor(
    label: string,
    presentationValue: string | number,
    value: string | number,
    configName: string,
    defaultConfigValue: string | number,
    configurationCategory: ConfigurationCategory,
  ) {
    this.Description = label
    this.PresentationValue = presentationValue
    this.Value = value
    this.ConfigurationName = configName
    this.ConfigValue = defaultConfigValue
    this.ConfigurationCategory = configurationCategory
  }
}
| export class ReportConfigurationEntry implements IReportConfigurationEntry { |
public Id
public Info
public PullRequestCallback: PullRequestCallback
constructor(id = '', info: IReportConfigInfo, measureCallback: PullRequestCallback = () => '') {
this.Id = id
this.Info = info
this.PullRequestCallback = measureCallback
}
}
// Aggregate report: identifier, human-readable description, and the
// configured entries that ReportGenerator renders.
export class Report implements IReport {
  public Id = ''
  public Description = ''
  public Entries: ReportConfigurationEntry[] = []
}
| src/Report.Definitions.ts | philips-software-pull-request-report-action-3390d78 | [
{
"filename": "src/Interfaces/ReportTypes.ts",
"retrieved_chunk": "// for license and copyright look at the repository\nimport { ConfigurationCategory } from '../Report.Definitions'\nimport { IPullRequest } from './PullRequestTypes'\nexport type PullRequestCallback = (pr: IPullRequest) => string | number\nexport interface IReportConfigInfo {\n Description: string\n PresentationValue: string | number\n Value: string | number\n ConfigurationName: string\n ConfigurationCategory: ConfigurationCategory",
"score": 0.828599214553833
},
{
"filename": "src/Interfaces/ReportTypes.ts",
"retrieved_chunk": " ConfigValue: string | number\n}\nexport interface IReportConfigurationEntry {\n Id: string\n Info: IReportConfigInfo\n PullRequestCallback: PullRequestCallback\n}\nexport interface IReport {\n Id: string\n Description: string",
"score": 0.8135509490966797
},
{
"filename": "src/Report.Generation.ts",
"retrieved_chunk": "// for license and copyright look at the repository\nimport { IPullRequest } from './Interfaces/PullRequestTypes'\nimport { IReport, IReportConfigurationEntry } from './Interfaces/ReportTypes'\nimport { tsMarkdown, table, TableEntry, H1Entry, H3Entry, MarkdownEntry } from 'ts-markdown'\nimport { ConfigurationCategory, ConfigurationCategoryTitleMap } from './Report.Definitions'\nexport class ReportGenerator {\n DescriptionHeaderLabel = 'Description'\n ValueHeaderLabel = 'Value'\n public Generate(pr: IPullRequest, report: IReport): string {\n const header = this.GenerateHeader(pr, report)",
"score": 0.7966878414154053
},
{
"filename": "src/Report.Generation.ts",
"retrieved_chunk": " tables.push(this.GenerateCategoryTable(pr, report, category))\n })\n return tables\n }\n private GenerateCategoryTitle(measureCategory: ConfigurationCategory): H3Entry {\n const title = { h3: `${ConfigurationCategoryTitleMap.get(measureCategory) || 'No category'}` }\n return title\n }\n private GenerateCategoryTable(pr: IPullRequest, report: IReport, measureCategory: ConfigurationCategory): TableEntry {\n const entries = this.GetMeasurementEntries(report.Entries)",
"score": 0.7807868123054504
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " 'AddPrReportAsComment',\n 'yes',\n ConfigurationCategory.ReportGeneratorValue,\n ),\n () => 0,\n ),\n)\nReportConfigurationTable.push(\n new ReportConfigurationEntry(\n 'title_string',",
"score": 0.7689938545227051
}
] | typescript | export class ReportConfigurationEntry implements IReportConfigurationEntry { |
// for license and copyright look at the repository
import {
IPullRequest,
IPullRequestComment,
IPullRequestCommit,
IPullRequestReview,
} from './Interfaces/PullRequestTypes'
import { EventWithTime } from './Interfaces/ReportTypes'
import { StatusCheck } from './PullRequest.Definitions'
export const GenerateEventTimeline = (pullRequest: IPullRequest): EventWithTime[] => {
const events: EventWithTime[][] = []
// merge all interesting events into a single list
events.push([
{ type: 'createAt', date: new Date(pullRequest.createdAt), event_instance: pullRequest.createdAt, time: 0 },
])
events.push(
pullRequest.commits.map((commit) => ({
type: 'commit',
date: new Date(commit.authorDate),
event_instance: commit,
time: 0,
})),
)
events.push(
pullRequest.reviews.map((review) => ({
type: 'review',
date: new Date(review.submittedAt),
event_instance: review,
time: 0,
})),
)
events.push(
pullRequest.statusChecks.map((statusCheck) => ({
type: 'statusCheck',
date: new Date(statusCheck.completedAt),
event_instance: statusCheck,
time: 0,
})),
)
events.push(
pullRequest.comments.map((comment) => ({
type: 'comment',
date: new Date(comment.createdAt),
event_instance: comment,
time: 0,
})),
)
events.push([
{ type: 'mergedAt', date: new Date(pullRequest.mergedAt), event_instance: pullRequest.mergedAt, time: 0 },
])
events.push([
{ type: 'closedAt', date: new Date(pullRequest.closedAt), event_instance: pullRequest.closedAt, time: 0 },
])
// flatten the list
const flattenedEvents = events.flat()
// filter out events that don't have a valid date
const filteredEvents = flattenedEvents.filter((event) => event.date !== null)
// sort the events by date
filteredEvents.sort((a, b) => a.date.getTime() - b.date.getTime())
// now, create a list of events with the time between events
const eventsWithTime: EventWithTime[] = []
// calculate the time between events
for (let i = 0; i < filteredEvents.length; i++) {
if (i === 0) {
eventsWithTime.push({
type: filteredEvents[i].type,
date: filteredEvents[i].date,
time: 0,
| event_instance: filteredEvents[i].event_instance,
})
} else { |
eventsWithTime.push({
type: filteredEvents[i].type,
date: filteredEvents[i].date,
time: (filteredEvents[i].date.getTime() - filteredEvents[i - 1].date.getTime()) / 1000,
event_instance: filteredEvents[i].event_instance,
})
}
}
return eventsWithTime
}
/**
 * Formats a millisecond duration as the largest sensible unit
 * ("5 Sec", "1.5 Min", "2 Hours", "3 Days"), rounded to one decimal place.
 */
export const MillisecondsToReadableDuration = (leadTimeInMSec: number) => {
  // convert to the given unit, rounding to at most one decimal (the unary +
  // drops a trailing ".0", so 5000 ms renders as "5 Sec", not "5.0 Sec")
  const inUnit = (msPerUnit: number): number => +(leadTimeInMSec / msPerUnit).toFixed(1)
  const seconds = inUnit(1000)
  if (seconds < 60) return `${seconds} Sec`
  const minutes = inUnit(1000 * 60)
  if (minutes < 60) return `${minutes} Min`
  const hours = inUnit(1000 * 60 * 60)
  if (hours < 24) return `${hours} Hours`
  return `${inUnit(1000 * 60 * 60 * 24)} Days`
}
/** The PR's merge timestamp, or its close timestamp when it was never merged. */
export const GetMergedOrClosedDate = (pullRequest: IPullRequest): string => {
  // '??' mirrors the original '== null' check (covers both null and undefined)
  return pullRequest.mergedAt ?? pullRequest.closedAt
}
export const GetLeadTimeForPullRequest = (pullRequest: IPullRequest) => {
// parse createAt as date from string
const createAt = new Date(pullRequest.createdAt)
const mergedOrClosedAt = new Date(GetMergedOrClosedDate(pullRequest))
const duration = mergedOrClosedAt.getTime() - createAt.getTime()
if (duration <= 0 || isNaN(duration)) return 0
return duration
}
/** Milliseconds between the first commit on the branch and PR creation; 0 when unknown. */
export const GetTimeSpendOnBranchBeforePRCreated = (pullRequest: IPullRequest) => {
  const timeline = GenerateEventTimeline(pullRequest)
  const createdEvent = timeline.find((event) => event.type === 'createAt')
  const firstCommit = timeline.find((event) => event.type === 'commit')
  if (!createdEvent || !firstCommit) return 0
  const duration = createdEvent.date.getTime() - firstCommit.date.getTime()
  // NaN > 0 is false, so invalid dates also yield 0 (as in the original)
  return duration > 0 ? duration : 0
}
/** Milliseconds between the first commit and the merge; -1 when not merged, no commit, or out of order. */
export const GetTimeSpendOnBranchBeforePRMerged = (pullRequest: IPullRequest) => {
  const timeline = GenerateEventTimeline(pullRequest)
  const mergedEvent = timeline.find((event) => event.type === 'mergedAt')
  const firstCommit = timeline.find((event) => event.type === 'commit')
  if (!mergedEvent || !firstCommit) return -1
  const mergedTime = mergedEvent.date.getTime()
  const firstCommitTime = firstCommit.date.getTime()
  return mergedTime > firstCommitTime ? mergedTime - firstCommitTime : -1
}
/**
 * Milliseconds between the last review and the merge.
 * Returns -1 when the PR has no reviews, is unmerged, or was merged before the last review.
 */
export const GetTimeToMergeAfterLastReview = (pullRequest: IPullRequest) => {
  const eventTimeline = GenerateEventTimeline(pullRequest)
  const mergedAtEvent = eventTimeline.find((event) => event.type === 'mergedAt')
  const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
  if (reviewEvents.length <= 0) {
    return -1
  }
  // the timeline is sorted ascending, so the last element is the latest review;
  // index access avoids the original's in-place reverse() just to read one element
  const lastReviewEvent = reviewEvents[reviewEvents.length - 1]
  if (mergedAtEvent && lastReviewEvent && mergedAtEvent.date.getTime() > lastReviewEvent.date.getTime()) {
    return mergedAtEvent.date.getTime() - lastReviewEvent.date.getTime()
  }
  return -1
}
/** Sum of (completedAt - startedAt) across all COMPLETED status checks, in milliseconds. */
export const GetTotalRuntimeForLastStatusCheckRun = (pullRequest: IPullRequest) => {
  const completedChecks = GenerateEventTimeline(pullRequest)
    .filter((event) => event.type === 'statusCheck')
    .map((event) => event.event_instance as StatusCheck)
    .filter((statusCheck) => statusCheck.status == 'COMPLETED')
  if (completedChecks.length <= 0) {
    return 0
  }
  // fold the per-check runtimes instead of mutating an accumulator in forEach
  return completedChecks.reduce(
    (total, check) => total + (new Date(check.completedAt).getTime() - new Date(check.startedAt).getTime()),
    0,
  )
}
/**
 * Wall-clock span of the status-check run: latest completion minus earliest start
 * across all COMPLETED checks, in milliseconds. Returns 0 when there are none.
 */
export const GetTimeSpendInPrForLastStatusCheckRun = (pullRequest: IPullRequest) => {
  const completedChecks = GenerateEventTimeline(pullRequest)
    .filter((event) => event.type === 'statusCheck')
    .map((event) => event.event_instance as StatusCheck)
    .filter((statusCheck) => statusCheck.status == 'COMPLETED')
  if (completedChecks.length <= 0) {
    return 0
  }
  // same seeds as the original: 'now' for the earliest start, the year-1900
  // Date(0, 0, 0) epoch for the latest completion
  let earliestStart = Date.now()
  let latestCompletion = new Date(0, 0, 0).getTime()
  for (const check of completedChecks) {
    earliestStart = Math.min(earliestStart, new Date(check.startedAt).getTime())
    latestCompletion = Math.max(latestCompletion, new Date(check.completedAt).getTime())
  }
  return latestCompletion - earliestStart
}
/** All review events on the timeline whose review state matches the given state. */
const FilterReviewsByState = (pullRequest: IPullRequest, state: string) => {
  const reviewEvents = GenerateEventTimeline(pullRequest).filter((event) => event.type === 'review')
  if (reviewEvents.length <= 0) {
    return []
  }
  return reviewEvents.filter((reviewEvent) => (reviewEvent.event_instance as IPullRequestReview).state === state)
}
// Number of reviews that only left comments (GitHub review state COMMENTED).
export const GetNumberOfCommentOnlyReviews = (pullRequest: IPullRequest) => {
  return FilterReviewsByState(pullRequest, 'COMMENTED').length
}
// Number of reviews that requested changes (state CHANGES_REQUESTED).
export const GetNumberOfRequestedChangeReviews = (pullRequest: IPullRequest) => {
  return FilterReviewsByState(pullRequest, 'CHANGES_REQUESTED').length
}
// Number of approving reviews (state APPROVED).
export const GetNumberOfApprovedReviews = (pullRequest: IPullRequest) => {
  return FilterReviewsByState(pullRequest, 'APPROVED').length
}
/** Logins of all distinct review authors, in order of first appearance. */
export const GetUniqueReviewParticipants = (pullRequest: IPullRequest) => {
  const reviewerLogins = GenerateEventTimeline(pullRequest)
    .filter((event) => event.type === 'review')
    .map((event) => (event.event_instance as IPullRequestReview).authorLogin)
  // Set keeps first-occurrence order, matching the original indexOf-based dedupe
  return [...new Set(reviewerLogins)]
}
/** Logins of all distinct comment authors, in order of first appearance. */
export const GetUniqueCommentParticipants = (pullRequest: IPullRequest) => {
  const commenterLogins = GenerateEventTimeline(pullRequest)
    .filter((event) => event.type === 'comment')
    .map((event) => (event.event_instance as IPullRequestComment).authorLogin)
  // Set keeps first-occurrence order, matching the original indexOf-based dedupe
  return [...new Set(commenterLogins)]
}
/** Logins of all distinct commit authors; authors without a login are skipped. */
export const GetUniqueCommitterParticipants = (pullRequest: IPullRequest) => {
  const committerLogins = GenerateEventTimeline(pullRequest)
    .filter((event) => event.type === 'commit')
    .flatMap((event) =>
      (event.event_instance as IPullRequestCommit).authors
        .filter((author) => author.login !== null)
        .map((author) => author.login),
    )
  // Set keeps first-occurrence order, matching the original indexOf-based dedupe
  return [...new Set(committerLogins)]
}
// Distinct people who engaged in review activity: union of unique reviewers
// and unique commenters.
export const GetNumberOfActivePullRequestReviewParticipants = (pullRequest: IPullRequest) => {
  const uniqueReviewers = GetUniqueReviewParticipants(pullRequest)
  const uniqueCommenter = GetUniqueCommentParticipants(pullRequest)
  return uniqueReviewers.concat(uniqueCommenter).filter((value, index, self) => self.indexOf(value) === index).length
}
// Number of distinct commit authors on the PR.
export const GetNumberOfPullRequestCommitter = (pullRequest: IPullRequest) => {
  return GetUniqueCommitterParticipants(pullRequest).length
}
// Total head count: review/comment participants plus committers.
// NOTE(review): someone who both commits and reviews is counted twice — confirm intended.
export const GetTotalNumberOfParticipants = (pullRequest: IPullRequest) => {
  return GetNumberOfActivePullRequestReviewParticipants(pullRequest) + GetNumberOfPullRequestCommitter(pullRequest)
}
| src/Report.Calculation.ts | philips-software-pull-request-report-action-3390d78 | [
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": " public static CreateFromJson(cliPullRequest: unknown): IPullRequest {\n const cliPullRequestObject = cliPullRequest as {\n number: number\n title: string\n createdAt: string\n updatedAt: string\n closedAt: string\n mergedAt: string\n body: string\n author: string",
"score": 0.7287963032722473
},
{
"filename": "src/Report.Generation.ts",
"retrieved_chunk": " const categoryEntries = entries.filter((entry) => entry.Info.ConfigurationCategory === measureCategory)\n categoryEntries.forEach((entry) => {\n entry.Info.Value = entry.PullRequestCallback(pr)\n })\n const rows = categoryEntries.map((entry) => ({\n Description: entry.Info.Description,\n Value: entry.Info.Value,\n }))\n return table({\n columns: [{ name: this.DescriptionHeaderLabel }, { name: this.ValueHeaderLabel }],",
"score": 0.7092421054840088
},
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": "export class FileChangeSummary implements IFileChangeSummary {\n public additions = 0\n public deletions = 0\n public commits = 0\n public changedFilesList = 0\n public static CreateFromJson(json: unknown): IFileChangeSummary {\n const jsonObject = json as { additions: number; deletions: number; commits: object[]; changedFiles: number }\n const summary = new FileChangeSummary()\n summary.additions = jsonObject['additions']\n summary.deletions = jsonObject['deletions']",
"score": 0.7087579965591431
},
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": " startedAt: string\n completedAt: string\n conclusion: string\n status: string\n name: string\n detailsUrl: string\n }\n const statusCheck = new StatusCheck()\n statusCheck.workflowName = jsonObject['workflowName']\n statusCheck.startedAt = jsonObject['startedAt']",
"score": 0.7057000398635864
},
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": " const jsonObject = json as {\n authoredDate: string\n authors: unknown[]\n committedDate: string\n messageHeadline: string\n messageBody: string\n oid: string\n }\n const commit = new PullRequestCommit()\n commit.authorDate = jsonObject['authoredDate']",
"score": 0.6993089914321899
}
] | typescript | event_instance: filteredEvents[i].event_instance,
})
} else { |
/**
* @swagger
* components:
* schemas:
* SignupRequest:
* type: object
* required:
* - email
* - password
* - name
* properties:
* name:
* type: string
* description: The user name
* email:
* type: string
* description: The user email address
* password:
* type: string
* description: The user password
* example:
* name: John Doe
* email: [email protected]
* password: password123
* LoginRequest:
* type: object
* required:
* - email
* - password
* properties:
* email:
* type: string
* description: The user email address
* password:
* type: string
* description: The user password
* example:
* email: [email protected]
* password: password123
*/
import express from 'express';
import { transferFund } from '../service';
import { protect } from '../../../middleware';
const router = express.Router();
/**
* @swagger
* /api/v1/account/transfer:
* post:
* tags:
* - Transfer
* summary: Transfer funds between accounts
* security:
* - BearerAuth: []
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* properties:
* fromAccountId:
* type: string
* description: The ID of the account to transfer funds from.
* example: "123456"
* toAccountId:
* type: string
* description: The ID of the account to transfer funds to.
* example: "789012"
* amount:
* type: number
* description: The amount of funds to transfer.
* example: 1000.00
* tag:
* type: string
* description: The tag associated with the transfer.
* example: "Rent payment"
* responses:
* '200':
* description: Successful transfer of funds
* '400':
* description: Invalid request parameters
* '401':
* description: Unauthorized request
*/
| router.post('/transfer', protect, transferFund); |
export default router;
| src/modules/account/controller/index.ts | walosha-BACKEND_DEV_TESTS-db2fcb4 | [
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * security:\n * - bearerAuth: []\n * responses:\n * \"200\":\n * description: The user profile\n * \"401\":\n * description: Unauthorized\n */\nrouter.post('/me', protect, getMe);\nexport default router;",
"score": 0.9278050065040588
},
{
"filename": "src/modules/auth/controller/users.ts",
"retrieved_chunk": " * description: The ID of the user to delete\n * responses:\n * \"204\":\n * description: User deleted successfully\n * \"401\":\n * description: Unauthorized\n * \"404\":\n * description: User not found\n */\n// A simple case where users can only delete themselves not the admin",
"score": 0.9238468408584595
},
{
"filename": "src/modules/auth/controller/users.ts",
"retrieved_chunk": " * tags: [User]\n * security:\n * - bearerAuth: []\n * responses:\n * \"200\":\n * description: A list of users\n * content:\n * application/json:\n * schema:\n * type: array",
"score": 0.8995816707611084
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * tags: [Auth]\n * requestBody:\n * required: true\n * content:\n * application/json:\n * schema:\n * $ref: '#/components/schemas/SignupRequest'\n * responses:\n * \"200\":\n * description: The created user.",
"score": 0.8808084726333618
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * post:\n * summary: Refreshes the access token\n * tags: [Auth]\n * requestBody:\n * required: true\n * content:\n * application/json:\n * schema:\n * type: object\n * required:",
"score": 0.8787384629249573
}
] | typescript | router.post('/transfer', protect, transferFund); |
/**
* YOU PROBABLY DON'T NEED TO EDIT THIS FILE, UNLESS:
* 1. You want to modify request context (see Part 1).
* 2. You want to create a new middleware or type of procedure (see Part 3).
*
* TL;DR - This is where all the tRPC server stuff is created and plugged in. The pieces you will
* need to use are documented accordingly near the end.
*/
/**
* 1. CONTEXT
*
* This section defines the "contexts" that are available in the backend API.
*
* These allow you to access things when processing a request, like the database, the session, etc.
*/
import { type CreateNextContextOptions } from "@trpc/server/adapters/next";
import { type Session } from "next-auth";
import { getServerAuthSession } from "~/server/auth";
import { prisma } from "~/server/db";
type CreateContextOptions = {
session: Session | null;
};
/**
* This helper generates the "internals" for a tRPC context. If you need to use it, you can export
* it from here.
*
* Examples of things you may need it for:
* - testing, so we don't have to mock Next.js' req/res
* - tRPC's `createSSGHelpers`, where we don't have req/res
*
* @see https://create.t3.gg/en/usage/trpc#-serverapitrpcts
*/
const createInnerTRPCContext = (opts: CreateContextOptions) => {
  return {
    session: opts.session,
    prisma, // shared Prisma client imported from ~/server/db
  };
};
/**
* This is the actual context you will use in your router. It will be used to process every request
* that goes through your tRPC endpoint.
*
* @see https://trpc.io/docs/context
*/
export const createTRPCContext = async (opts: CreateNextContextOptions) => {
const { req, res } = opts;
// Get the session from the server using the getServerSession wrapper function
const session = | await getServerAuthSession({ req, res }); |
return createInnerTRPCContext({
session,
});
};
/**
* 2. INITIALIZATION
*
* This is where the tRPC API is initialized, connecting the context and transformer. We also parse
* ZodErrors so that you get typesafety on the frontend if your procedure fails due to validation
* errors on the backend.
*/
import { initTRPC, TRPCError } from "@trpc/server";
import superjson from "superjson";
import { ZodError } from "zod";
const t = initTRPC.context<typeof createTRPCContext>().create({
  transformer: superjson,
  errorFormatter({ shape, error }) {
    return {
      ...shape,
      data: {
        ...shape.data,
        // expose flattened Zod issues to the client; null for non-validation errors
        zodError:
          error.cause instanceof ZodError ? error.cause.flatten() : null,
      },
    };
  },
});
/**
* 3. ROUTER & PROCEDURE (THE IMPORTANT BIT)
*
* These are the pieces you use to build your tRPC API. You should import these a lot in the
* "/src/server/api/routers" directory.
*/
/**
* This is how you create new routers and sub-routers in your tRPC API.
*
* @see https://trpc.io/docs/router
*/
export const createTRPCRouter = t.router;
/**
* Public (unauthenticated) procedure
*
* This is the base piece you use to build new queries and mutations on your tRPC API. It does not
* guarantee that a user querying is authorized, but you can still access user session data if they
* are logged in.
*/
export const publicProcedure = t.procedure;
/** Reusable middleware that enforces users are logged in before running the procedure. */
const enforceUserIsAuthed = t.middleware(({ ctx, next }) => {
  // reject before the procedure body runs when there is no authenticated user
  if (!ctx.session || !ctx.session.user) {
    throw new TRPCError({ code: "UNAUTHORIZED" });
  }
  return next({
    ctx: {
      // infers the `session` as non-nullable
      session: { ...ctx.session, user: ctx.session.user },
    },
  });
});
/**
* Protected (authenticated) procedure
*
* If you want a query or mutation to ONLY be accessible to logged in users, use this. It verifies
* the session is valid and guarantees `ctx.session.user` is not null.
*
* @see https://trpc.io/docs/procedures
*/
export const protectedProcedure = t.procedure.use(enforceUserIsAuthed);
| src/server/api/trpc.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/server/auth.ts",
"retrieved_chunk": "/**\n * Options for NextAuth.js used to configure adapters, providers, callbacks, etc.\n *\n * @see https://next-auth.js.org/configuration/options\n */\nexport const authOptions: NextAuthOptions = {\n callbacks: {\n session: ({ session, user }) => ({\n ...session,\n user: {",
"score": 0.8533605933189392
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": "/**\n * This is the client-side entrypoint for your tRPC API. It is used to create the `api` object which\n * contains the Next.js App-wrapper, as well as your type-safe React Query hooks.\n *\n * We also create a few inference helpers for input and output types.\n */\nimport { httpBatchLink, loggerLink } from \"@trpc/client\";\nimport { createTRPCNext } from \"@trpc/next\";\nimport { type inferRouterInputs, type inferRouterOutputs } from \"@trpc/server\";\nimport superjson from \"superjson\";",
"score": 0.8512413501739502
},
{
"filename": "src/server/auth.ts",
"retrieved_chunk": " clientSecret: env.GOOGLE_CLIENT_SECRET,\n }),\n /**\n * ...add more providers here.\n *\n * Most other providers require a bit more work than the Discord provider. For example, the\n * GitHub provider requires you to add the `refresh_token_expires_in` field to the Account\n * model. Refer to the NextAuth.js docs for the provider you want to use. Example:\n *\n * @see https://next-auth.js.org/providers/github",
"score": 0.8273910284042358
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": " /**\n * Transformer used for data de-serialization from the server.\n *\n * @see https://trpc.io/docs/data-transformers\n */\n transformer: superjson,\n /**\n * Links used to determine request flow from client to server.\n *\n * @see https://trpc.io/docs/links",
"score": 0.826209545135498
},
{
"filename": "src/server/auth.ts",
"retrieved_chunk": " */\n ]\n};\n/**\n * Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.\n *\n * @see https://next-auth.js.org/configuration/nextjs\n */\nexport const getServerAuthSession = (ctx: {\n req: GetServerSidePropsContext[\"req\"];",
"score": 0.8070823550224304
}
] | typescript | await getServerAuthSession({ req, res }); |
// for license and copyright look at the repository
import {
IPullRequest,
IPullRequestComment,
IPullRequestCommit,
IPullRequestReview,
} from './Interfaces/PullRequestTypes'
import { EventWithTime } from './Interfaces/ReportTypes'
import { StatusCheck } from './PullRequest.Definitions'
/**
 * Flattens all PR events (creation, commits, reviews, status checks, comments,
 * merge/close) into a single timeline sorted by date. Each entry carries the
 * elapsed time in seconds since the previous event; the first entry gets 0.
 * Events whose date field is null are dropped.
 */
export const GenerateEventTimeline = (pullRequest: IPullRequest): EventWithTime[] => {
  const events: EventWithTime[][] = []
  // merge all interesting events into a single list
  events.push([
    { type: 'createAt', date: new Date(pullRequest.createdAt), event_instance: pullRequest.createdAt, time: 0 },
  ])
  events.push(
    pullRequest.commits.map((commit) => ({
      type: 'commit',
      date: new Date(commit.authorDate),
      event_instance: commit,
      time: 0,
    })),
  )
  events.push(
    pullRequest.reviews.map((review) => ({
      type: 'review',
      date: new Date(review.submittedAt),
      event_instance: review,
      time: 0,
    })),
  )
  events.push(
    pullRequest.statusChecks.map((statusCheck) => ({
      type: 'statusCheck',
      date: new Date(statusCheck.completedAt),
      event_instance: statusCheck,
      time: 0,
    })),
  )
  events.push(
    pullRequest.comments.map((comment) => ({
      type: 'comment',
      date: new Date(comment.createdAt),
      event_instance: comment,
      time: 0,
    })),
  )
  events.push([
    { type: 'mergedAt', date: new Date(pullRequest.mergedAt), event_instance: pullRequest.mergedAt, time: 0 },
  ])
  events.push([
    { type: 'closedAt', date: new Date(pullRequest.closedAt), event_instance: pullRequest.closedAt, time: 0 },
  ])
  // flatten, drop events without a valid date, and sort chronologically
  const filteredEvents = events.flat().filter((event) => event.date !== null)
  filteredEvents.sort((a, b) => a.date.getTime() - b.date.getTime())
  // annotate each event with the seconds elapsed since its predecessor;
  // single pass replaces the duplicated i === 0 / else branches of the original
  return filteredEvents.map((event, i) => ({
    type: event.type,
    date: event.date,
    time: i === 0 ? 0 : (event.date.getTime() - filteredEvents[i - 1].date.getTime()) / 1000,
    event_instance: event.event_instance,
  }))
}
/**
 * Formats a millisecond duration as the largest sensible unit
 * ("5 Sec", "1.5 Min", "2 Hours", "3 Days"), rounded to one decimal place.
 */
export const MillisecondsToReadableDuration = (leadTimeInMSec: number) => {
  // convert to the given unit, rounding to at most one decimal (the unary +
  // drops a trailing ".0", so 5000 ms renders as "5 Sec", not "5.0 Sec")
  const inUnit = (msPerUnit: number): number => +(leadTimeInMSec / msPerUnit).toFixed(1)
  const seconds = inUnit(1000)
  if (seconds < 60) return `${seconds} Sec`
  const minutes = inUnit(1000 * 60)
  if (minutes < 60) return `${minutes} Min`
  const hours = inUnit(1000 * 60 * 60)
  if (hours < 24) return `${hours} Hours`
  return `${inUnit(1000 * 60 * 60 * 24)} Days`
}
/** The PR's merge timestamp, or its close timestamp when it was never merged. */
export const GetMergedOrClosedDate = (pullRequest: IPullRequest): string => {
  // '??' mirrors the original '== null' check (covers both null and undefined)
  return pullRequest.mergedAt ?? pullRequest.closedAt
}
export const GetLeadTimeForPullRequest = (pullRequest: IPullRequest) => {
// parse createAt as date from string
const createAt = new Date(pullRequest.createdAt)
const mergedOrClosedAt = new Date(GetMergedOrClosedDate(pullRequest))
const duration = mergedOrClosedAt.getTime() - createAt.getTime()
if (duration <= 0 || isNaN(duration)) return 0
return duration
}
/** Milliseconds between the first commit on the branch and PR creation; 0 when unknown. */
export const GetTimeSpendOnBranchBeforePRCreated = (pullRequest: IPullRequest) => {
  const timeline = GenerateEventTimeline(pullRequest)
  const createdEvent = timeline.find((event) => event.type === 'createAt')
  const firstCommit = timeline.find((event) => event.type === 'commit')
  if (!createdEvent || !firstCommit) return 0
  const duration = createdEvent.date.getTime() - firstCommit.date.getTime()
  // NaN > 0 is false, so invalid dates also yield 0 (as in the original)
  return duration > 0 ? duration : 0
}
export const GetTimeSpendOnBranchBeforePRMerged = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const mergedAtEvent = eventTimeline.find((event) => event.type === 'mergedAt')
const firstCommitEvent = eventTimeline.find((event) => event.type === 'commit')
if (mergedAtEvent && firstCommitEvent && mergedAtEvent.date.getTime() > firstCommitEvent.date.getTime()) {
return mergedAtEvent.date.getTime() - firstCommitEvent.date.getTime()
}
return -1
}
export const GetTimeToMergeAfterLastReview = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const mergedAtEvent = eventTimeline.find((event) => event.type === 'mergedAt')
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
if (reviewEvents.length <= 0) {
return -1
}
const lastReviewEvent = reviewEvents.reverse()[0]
if (mergedAtEvent && lastReviewEvent && mergedAtEvent.date.getTime() > lastReviewEvent.date.getTime()) {
return mergedAtEvent.date.getTime() - lastReviewEvent.date.getTime()
}
return -1
}
export const GetTotalRuntimeForLastStatusCheckRun = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const statusCheckEvents = eventTimeline
.filter((event) => event.type === 'statusCheck')
.map((event) => event.event_instance as StatusCheck)
.filter((statusCheck) => statusCheck.status == 'COMPLETED')
if (statusCheckEvents.length <= 0) {
return 0
}
let totalTime = 0
statusCheckEvents.forEach((statusCheck) => {
totalTime += new Date( | statusCheck.completedAt).getTime() - new Date(statusCheck.startedAt).getTime()
})
return totalTime
} |
export const GetTimeSpendInPrForLastStatusCheckRun = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const statusCheckEvents = eventTimeline
.filter((event) => event.type === 'statusCheck')
.map((event) => event.event_instance as StatusCheck)
.filter((statusCheck) => statusCheck.status == 'COMPLETED')
if (statusCheckEvents.length <= 0) {
return 0
}
let earliestStart = new Date()
let latestCompletion = new Date(0, 0, 0)
statusCheckEvents.forEach((statusCheckEvent) => {
const completedDate = new Date(statusCheckEvent.completedAt)
const startedDate = new Date(statusCheckEvent.startedAt)
if (startedDate < earliestStart) {
earliestStart = startedDate
}
if (completedDate > latestCompletion) {
latestCompletion = completedDate
}
})
return latestCompletion.getTime() - earliestStart.getTime()
}
const FilterReviewsByState = (pullRequest: IPullRequest, state: string) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
if (reviewEvents.length <= 0) {
return []
}
const filteredReviews = reviewEvents.filter((reviewEvent) => {
const review = reviewEvent.event_instance as IPullRequestReview
return review.state === state
})
return filteredReviews
}
export const GetNumberOfCommentOnlyReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'COMMENTED').length
}
export const GetNumberOfRequestedChangeReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'CHANGES_REQUESTED').length
}
export const GetNumberOfApprovedReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'APPROVED').length
}
export const GetUniqueReviewParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
// extract unique reviewers from review events
return reviewEvents
.map((reviewEvent) => reviewEvent.event_instance as IPullRequestReview)
.map((review) => review.authorLogin)
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetUniqueCommentParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const commentEvents = eventTimeline.filter((event) => event.type === 'comment')
// extract unique commenter from review events
return commentEvents
.map((commentEvent) => commentEvent.event_instance as IPullRequestComment)
.map((comment) => comment.authorLogin)
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetUniqueCommitterParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const commitEvents = eventTimeline.filter((event) => event.type === 'commit')
// extract unique reviewers from review events
return commitEvents
.map((commitEvent) => commitEvent.event_instance as IPullRequestCommit)
.map((commit) => commit.authors.filter((author) => author.login !== null).map((author) => author.login))
.flat()
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetNumberOfActivePullRequestReviewParticipants = (pullRequest: IPullRequest) => {
const uniqueReviewers = GetUniqueReviewParticipants(pullRequest)
const uniqueCommenter = GetUniqueCommentParticipants(pullRequest)
return uniqueReviewers.concat(uniqueCommenter).filter((value, index, self) => self.indexOf(value) === index).length
}
export const GetNumberOfPullRequestCommitter = (pullRequest: IPullRequest) => {
return GetUniqueCommitterParticipants(pullRequest).length
}
export const GetTotalNumberOfParticipants = (pullRequest: IPullRequest) => {
return GetNumberOfActivePullRequestReviewParticipants(pullRequest) + GetNumberOfPullRequestCommitter(pullRequest)
}
| src/Report.Calculation.ts | philips-software-pull-request-report-action-3390d78 | [
{
"filename": "src/Report.Generation.ts",
"retrieved_chunk": " const categoryEntries = entries.filter((entry) => entry.Info.ConfigurationCategory === measureCategory)\n categoryEntries.forEach((entry) => {\n entry.Info.Value = entry.PullRequestCallback(pr)\n })\n const rows = categoryEntries.map((entry) => ({\n Description: entry.Info.Description,\n Value: entry.Info.Value,\n }))\n return table({\n columns: [{ name: this.DescriptionHeaderLabel }, { name: this.ValueHeaderLabel }],",
"score": 0.6808984279632568
},
{
"filename": "src/Report.Generation.ts",
"retrieved_chunk": " const table = this.GenerateMeasureTable(pr, report)\n const reportElements = [header, ...table]\n return tsMarkdown(reportElements)\n }\n public GenerateHeader(pr: IPullRequest, report: IReport): H1Entry {\n const title = { h1: `${report.Description} (#${pr.id})` }\n return title\n }\n public GetMeasurementEntries(entries: IReportConfigurationEntry[]): IReportConfigurationEntry[] {\n if (entries !== undefined && entries !== null && entries.length > 0) {",
"score": 0.6731629371643066
},
{
"filename": "src/Report.Generation.ts",
"retrieved_chunk": " return entries.filter((entry) => ConfigurationCategory[entry.Info.ConfigurationCategory].endsWith('Measures'))\n }\n return []\n }\n public GenerateMeasureTable(pr: IPullRequest, report: IReport): MarkdownEntry[] {\n const tables: MarkdownEntry[] = []\n const entries = this.GetMeasurementEntries(report.Entries)\n const categories = new Set(entries.map((entry) => entry.Info.ConfigurationCategory))\n categories.forEach((category) => {\n tables.push(this.GenerateCategoryTitle(category))",
"score": 0.668480396270752
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " // get the property value of inputs\n entry.Info.ConfigValue = (configValues as { [key: string]: string | number })[entry.Info.ConfigurationName]\n })\n return measurementEntries\n}\nexport const GetActiveMeasures = (entries: Array<ReportConfigurationEntry>): Array<ReportConfigurationEntry> => {\n return entries.filter((entry) => entry.Info.ConfigValue === 'yes')\n}\nexport const ReportConfigurationTable = new Array<ReportConfigurationEntry>()\nReportConfigurationTable.push(",
"score": 0.6667159199714661
},
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": " startedAt: string\n completedAt: string\n conclusion: string\n status: string\n name: string\n detailsUrl: string\n }\n const statusCheck = new StatusCheck()\n statusCheck.workflowName = jsonObject['workflowName']\n statusCheck.startedAt = jsonObject['startedAt']",
"score": 0.6660475134849548
}
] | typescript | statusCheck.completedAt).getTime() - new Date(statusCheck.startedAt).getTime()
})
return totalTime
} |
// for license and copyright look at the repository
import { IReport, IReportConfigurationEntry, IReportConfigInfo, PullRequestCallback } from './Interfaces/ReportTypes'
export enum ConfigurationCategory {
None,
StaticMeasures,
TimeRelatedMeasures,
StatusCheckRelatedMeasures,
ReportGeneratorValue,
}
export const ConfigurationCategoryTitleMap = new Map<ConfigurationCategory, string>([
[ConfigurationCategory.None, 'None'],
[ConfigurationCategory.StaticMeasures, 'Static measures'],
[ConfigurationCategory.TimeRelatedMeasures, 'Time related measures'],
[ConfigurationCategory.StatusCheckRelatedMeasures, 'Status check related measures'],
[ConfigurationCategory.ReportGeneratorValue, 'Report generator related predefined strings'],
])
export class ConfigurationInfo implements IReportConfigInfo {
public Description
public PresentationValue
public Value
public ConfigurationName
public ConfigValue
public ConfigurationCategory
constructor(
label: string,
presentationValue: string | number,
value: string | number,
configName: string,
defaultConfigValue: string | number,
configurationCategory: ConfigurationCategory,
) {
this.Description = label
this.PresentationValue = presentationValue
this.Value = value
this.ConfigurationName = configName
this.ConfigValue = defaultConfigValue
this.ConfigurationCategory = configurationCategory
}
}
export class ReportConfigurationEntry implements IReportConfigurationEntry {
public Id
public Info
| public PullRequestCallback: PullRequestCallback
constructor(id = '', info: IReportConfigInfo, measureCallback: PullRequestCallback = () => '') { |
this.Id = id
this.Info = info
this.PullRequestCallback = measureCallback
}
}
export class Report implements IReport {
public Id = ''
public Description = ''
public Entries: ReportConfigurationEntry[] = []
}
| src/Report.Definitions.ts | philips-software-pull-request-report-action-3390d78 | [
{
"filename": "src/Interfaces/ReportTypes.ts",
"retrieved_chunk": " ConfigValue: string | number\n}\nexport interface IReportConfigurationEntry {\n Id: string\n Info: IReportConfigInfo\n PullRequestCallback: PullRequestCallback\n}\nexport interface IReport {\n Id: string\n Description: string",
"score": 0.9128432273864746
},
{
"filename": "src/Interfaces/ReportTypes.ts",
"retrieved_chunk": "// for license and copyright look at the repository\nimport { ConfigurationCategory } from '../Report.Definitions'\nimport { IPullRequest } from './PullRequestTypes'\nexport type PullRequestCallback = (pr: IPullRequest) => string | number\nexport interface IReportConfigInfo {\n Description: string\n PresentationValue: string | number\n Value: string | number\n ConfigurationName: string\n ConfigurationCategory: ConfigurationCategory",
"score": 0.8860822916030884
},
{
"filename": "src/Report.Generation.ts",
"retrieved_chunk": "// for license and copyright look at the repository\nimport { IPullRequest } from './Interfaces/PullRequestTypes'\nimport { IReport, IReportConfigurationEntry } from './Interfaces/ReportTypes'\nimport { tsMarkdown, table, TableEntry, H1Entry, H3Entry, MarkdownEntry } from 'ts-markdown'\nimport { ConfigurationCategory, ConfigurationCategoryTitleMap } from './Report.Definitions'\nexport class ReportGenerator {\n DescriptionHeaderLabel = 'Description'\n ValueHeaderLabel = 'Value'\n public Generate(pr: IPullRequest, report: IReport): string {\n const header = this.GenerateHeader(pr, report)",
"score": 0.8436518311500549
},
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": "}\nexport class PullRequestCommit implements IPullRequestCommit {\n public authors: ICommitAuthor[] = []\n public committer = ''\n public authorDate = ''\n public commitDate = ''\n public commitHeader = ''\n public commitBody = ''\n public commitId = ''\n public static CreateFromJson(json: unknown): IPullRequestCommit {",
"score": 0.8194235563278198
},
{
"filename": "src/run.ts",
"retrieved_chunk": " report.Entries = activeConfigValues\n report.Description = 'Test report'\n report.Id = pullRequestDataModel.id.toString()\n return report\n}\nconst IsConfigValueYes = (configValue: string): boolean => {\n return configValue.trim().toLowerCase() === 'yes'\n}\nexport const run = async (inputsFromWorkflow: ConfigurationInputs): Promise<number> => {\n // take care that action is running only in PR context",
"score": 0.8173242211341858
}
] | typescript | public PullRequestCallback: PullRequestCallback
constructor(id = '', info: IReportConfigInfo, measureCallback: PullRequestCallback = () => '') { |
/**
* @swagger
* components:
* schemas:
* User:
* type: object
* required:
* - name
* - email
* properties:
* name:
* type: string
* description: The user name
* email:
* type: string
* format: email
* description: The user email address
* password:
* type: string
* description: The user password (hashed)
* role:
* type: string
* enum: [user, admin]
* description: The user role
* default: user
* example:
* name: John Doe
* email: [email protected]
* password: $2a$10$gR06R4K1NM4p4b4ELq.LlOTzq3Dcxj2iPwE5U/O2MDE70o9noemhO
* role: user
*/
import express from 'express';
import { deleteUser, fetchUsers } from '../service';
import { protect, restrictTo } from '../../../middleware';
const router = express.Router();
/**
* @swagger
* /api/v1/users:
* get:
* summary: Retrieve all users
* tags: [User]
* security:
* - bearerAuth: []
* responses:
* "200":
* description: A list of users
* content:
* application/json:
* schema:
* type: array
* items:
* $ref: '#/components/schemas/User'
* "401":
* description: Unauthorized
*/
router. | get('/', protect, restrictTo('admin'), fetchUsers); |
/**
* @swagger
* /api/v1/users/{id}:
* delete:
* summary: Delete a user by ID
* tags: [User]
* security:
* - bearerAuth: []
* parameters:
* - in: path
* name: id
* schema:
* type: string
* required: true
* description: The ID of the user to delete
* responses:
* "204":
* description: User deleted successfully
* "401":
* description: Unauthorized
* "404":
* description: User not found
*/
// A simple case where users can only delete themselves not the admin
router.delete('/:id', restrictTo('user'), deleteUser);
export default router;
| src/modules/auth/controller/users.ts | walosha-BACKEND_DEV_TESTS-db2fcb4 | [
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * description: The authenticated user.\n * content:\n * application/json:\n * schema:\n * $ref: '#/components/schemas/User'\n */\nrouter.post('/login', login);\n/**\n * @swagger\n * /api/v1/auth/refresh:",
"score": 0.8963272571563721
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * tags: [Auth]\n * requestBody:\n * required: true\n * content:\n * application/json:\n * schema:\n * $ref: '#/components/schemas/SignupRequest'\n * responses:\n * \"200\":\n * description: The created user.",
"score": 0.8891136646270752
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * summary: Login User\n * tags: [Auth]\n * requestBody:\n * required: true\n * content:\n * application/json:\n * schema:\n * $ref: '#/components/schemas/LoginRequest'\n * responses:\n * \"200\":",
"score": 0.8864021897315979
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * \"401\":\n * description: Invalid or expired token or refresh token was already used\n */\nrouter.post('/refresh', refreshMiddleware, refresh);\n/**\n * @swagger\n * /api/v1/auth/me:\n * post:\n * summary: Get user profile\n * tags: [Auth]",
"score": 0.877558708190918
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * security:\n * - bearerAuth: []\n * responses:\n * \"200\":\n * description: The user profile\n * \"401\":\n * description: Unauthorized\n */\nrouter.post('/me', protect, getMe);\nexport default router;",
"score": 0.8757235407829285
}
] | typescript | get('/', protect, restrictTo('admin'), fetchUsers); |
// for license and copyright look at the repository
import {
IPullRequest,
IPullRequestComment,
IPullRequestCommit,
IPullRequestReview,
} from './Interfaces/PullRequestTypes'
import { EventWithTime } from './Interfaces/ReportTypes'
import { StatusCheck } from './PullRequest.Definitions'
export const GenerateEventTimeline = (pullRequest: IPullRequest): EventWithTime[] => {
const events: EventWithTime[][] = []
// merge all interesting events into a single list
events.push([
{ type: 'createAt', date: new Date(pullRequest.createdAt), event_instance: pullRequest.createdAt, time: 0 },
])
events.push(
pullRequest.commits.map((commit) => ({
type: 'commit',
date: new Date(commit.authorDate),
event_instance: commit,
time: 0,
})),
)
events.push(
pullRequest.reviews.map((review) => ({
type: 'review',
date: new Date(review.submittedAt),
event_instance: review,
time: 0,
})),
)
events.push(
pullRequest.statusChecks.map((statusCheck) => ({
type: 'statusCheck',
date: new Date(statusCheck.completedAt),
event_instance: statusCheck,
time: 0,
})),
)
events.push(
pullRequest.comments.map((comment) => ({
type: 'comment',
date: new Date(comment.createdAt),
event_instance: comment,
time: 0,
})),
)
events.push([
{ type: 'mergedAt', date: new Date(pullRequest.mergedAt), event_instance: pullRequest.mergedAt, time: 0 },
])
events.push([
{ type: 'closedAt', date: new Date(pullRequest.closedAt), event_instance: pullRequest.closedAt, time: 0 },
])
// flatten the list
const flattenedEvents = events.flat()
// filter out events that don't have a valid date
const filteredEvents = flattenedEvents.filter((event) => event.date !== null)
// sort the events by date
filteredEvents.sort((a, b) => a.date.getTime() - b.date.getTime())
// now, create a list of events with the time between events
const eventsWithTime: EventWithTime[] = []
// calculate the time between events
for (let i = 0; i < filteredEvents.length; i++) {
if (i === 0) {
eventsWithTime.push({
type: filteredEvents[i].type,
date: filteredEvents[i].date,
time: 0,
event_instance: filteredEvents[i].event_instance,
})
} else {
eventsWithTime.push({
type: filteredEvents[i].type,
date: filteredEvents[i].date,
time: (filteredEvents[i].date.getTime() - filteredEvents[i - 1].date.getTime()) / 1000,
event_instance: filteredEvents[i].event_instance,
})
}
}
return eventsWithTime
}
export const MillisecondsToReadableDuration = (leadTimeInMSec: number) => {
const seconds = +(leadTimeInMSec / 1000).toFixed(1)
const minutes = +(leadTimeInMSec / (1000 * 60)).toFixed(1)
const hours = +(leadTimeInMSec / (1000 * 60 * 60)).toFixed(1)
const days = +(leadTimeInMSec / (1000 * 60 * 60 * 24)).toFixed(1)
if (seconds < 60) return `${seconds} Sec`
else if (minutes < 60) return `${minutes} Min`
else if (hours < 24) return `${hours} Hours`
else return `${days} Days`
}
export const GetMergedOrClosedDate = (pullRequest: IPullRequest): string => {
let mergedOrClosedAt = pullRequest.mergedAt
if (mergedOrClosedAt == null) mergedOrClosedAt = pullRequest.closedAt
return mergedOrClosedAt
}
export const GetLeadTimeForPullRequest = (pullRequest: IPullRequest) => {
// parse createAt as date from string
const createAt = new Date(pullRequest.createdAt)
const mergedOrClosedAt = new Date(GetMergedOrClosedDate(pullRequest))
const duration = mergedOrClosedAt.getTime() - createAt.getTime()
if (duration <= 0 || isNaN(duration)) return 0
return duration
}
export const GetTimeSpendOnBranchBeforePRCreated = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const createAtEvent = eventTimeline.find((event) => event.type === 'createAt')
const firstCommitEvent = eventTimeline.find((event) => event.type === 'commit')
if (!createAtEvent || !firstCommitEvent) return 0
const duration = createAtEvent.date.getTime() - firstCommitEvent.date.getTime()
if (duration <= 0 || isNaN(duration)) return 0
return duration
}
export const GetTimeSpendOnBranchBeforePRMerged = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const mergedAtEvent = eventTimeline.find((event) => event.type === 'mergedAt')
const firstCommitEvent = eventTimeline.find((event) => event.type === 'commit')
if (mergedAtEvent && firstCommitEvent && mergedAtEvent.date.getTime() > firstCommitEvent.date.getTime()) {
return mergedAtEvent.date.getTime() - firstCommitEvent.date.getTime()
}
return -1
}
export const GetTimeToMergeAfterLastReview = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const mergedAtEvent = eventTimeline.find((event) => event.type === 'mergedAt')
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
if (reviewEvents.length <= 0) {
return -1
}
const lastReviewEvent = reviewEvents.reverse()[0]
if (mergedAtEvent && lastReviewEvent && mergedAtEvent.date.getTime() > lastReviewEvent.date.getTime()) {
return mergedAtEvent.date.getTime() - lastReviewEvent.date.getTime()
}
return -1
}
export const GetTotalRuntimeForLastStatusCheckRun = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const statusCheckEvents = eventTimeline
.filter((event) => event.type === 'statusCheck')
.map((event) => event.event_instance as StatusCheck)
.filter((statusCheck) => | statusCheck.status == 'COMPLETED')
if (statusCheckEvents.length <= 0) { |
return 0
}
let totalTime = 0
statusCheckEvents.forEach((statusCheck) => {
totalTime += new Date(statusCheck.completedAt).getTime() - new Date(statusCheck.startedAt).getTime()
})
return totalTime
}
export const GetTimeSpendInPrForLastStatusCheckRun = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const statusCheckEvents = eventTimeline
.filter((event) => event.type === 'statusCheck')
.map((event) => event.event_instance as StatusCheck)
.filter((statusCheck) => statusCheck.status == 'COMPLETED')
if (statusCheckEvents.length <= 0) {
return 0
}
let earliestStart = new Date()
let latestCompletion = new Date(0, 0, 0)
statusCheckEvents.forEach((statusCheckEvent) => {
const completedDate = new Date(statusCheckEvent.completedAt)
const startedDate = new Date(statusCheckEvent.startedAt)
if (startedDate < earliestStart) {
earliestStart = startedDate
}
if (completedDate > latestCompletion) {
latestCompletion = completedDate
}
})
return latestCompletion.getTime() - earliestStart.getTime()
}
const FilterReviewsByState = (pullRequest: IPullRequest, state: string) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
if (reviewEvents.length <= 0) {
return []
}
const filteredReviews = reviewEvents.filter((reviewEvent) => {
const review = reviewEvent.event_instance as IPullRequestReview
return review.state === state
})
return filteredReviews
}
export const GetNumberOfCommentOnlyReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'COMMENTED').length
}
export const GetNumberOfRequestedChangeReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'CHANGES_REQUESTED').length
}
export const GetNumberOfApprovedReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'APPROVED').length
}
export const GetUniqueReviewParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
// extract unique reviewers from review events
return reviewEvents
.map((reviewEvent) => reviewEvent.event_instance as IPullRequestReview)
.map((review) => review.authorLogin)
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetUniqueCommentParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const commentEvents = eventTimeline.filter((event) => event.type === 'comment')
// extract unique commenter from review events
return commentEvents
.map((commentEvent) => commentEvent.event_instance as IPullRequestComment)
.map((comment) => comment.authorLogin)
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetUniqueCommitterParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const commitEvents = eventTimeline.filter((event) => event.type === 'commit')
// extract unique reviewers from review events
return commitEvents
.map((commitEvent) => commitEvent.event_instance as IPullRequestCommit)
.map((commit) => commit.authors.filter((author) => author.login !== null).map((author) => author.login))
.flat()
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetNumberOfActivePullRequestReviewParticipants = (pullRequest: IPullRequest) => {
const uniqueReviewers = GetUniqueReviewParticipants(pullRequest)
const uniqueCommenter = GetUniqueCommentParticipants(pullRequest)
return uniqueReviewers.concat(uniqueCommenter).filter((value, index, self) => self.indexOf(value) === index).length
}
export const GetNumberOfPullRequestCommitter = (pullRequest: IPullRequest) => {
return GetUniqueCommitterParticipants(pullRequest).length
}
export const GetTotalNumberOfParticipants = (pullRequest: IPullRequest) => {
return GetNumberOfActivePullRequestReviewParticipants(pullRequest) + GetNumberOfPullRequestCommitter(pullRequest)
}
| src/Report.Calculation.ts | philips-software-pull-request-report-action-3390d78 | [
{
"filename": "src/run.ts",
"retrieved_chunk": " if (process.env.GITHUB_EVENT_NAME !== 'pull_request') {\n core.setFailed('Action is running outside of PR context')\n return 0\n }\n UpdateConfig(inputsFromWorkflow, ReportConfigurationTable)\n const activeConfigValues = GetActiveMeasures(ReportConfigurationTable)\n // get PR data from github cli\n const cliPullRequestData = await GetPullRequestData(github.context.issue.number)\n const cliPullRequestDataAsString = SanitizeMarkdownComment(JSON.stringify(cliPullRequestData))\n // transform PR data to a typed model",
"score": 0.7648374438285828
},
{
"filename": "src/Report.Generation.ts",
"retrieved_chunk": " return entries.filter((entry) => ConfigurationCategory[entry.Info.ConfigurationCategory].endsWith('Measures'))\n }\n return []\n }\n public GenerateMeasureTable(pr: IPullRequest, report: IReport): MarkdownEntry[] {\n const tables: MarkdownEntry[] = []\n const entries = this.GetMeasurementEntries(report.Entries)\n const categories = new Set(entries.map((entry) => entry.Info.ConfigurationCategory))\n categories.forEach((category) => {\n tables.push(this.GenerateCategoryTitle(category))",
"score": 0.7476712465286255
},
{
"filename": "src/Report.Functions.ts",
"retrieved_chunk": "}\nexport const GetCommitsCount = (pr: IPullRequest): number => {\n return pr.fileChangeSummary.commits\n}\nexport const GetReviewCount = (pr: IPullRequest): number => {\n return pr.reviews.length\n}\nexport const GetCommentCount = (pr: IPullRequest): number => {\n return pr.comments.length\n}",
"score": 0.7471026182174683
},
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": " PullRequestComment.CreateFromJson(comment),\n )\n pr.statusChecks = ParseArrayOfType<IStatusCheck>(cliPullRequestObject['statusCheckRollup'], (statusCheck) =>\n StatusCheck.CreateFromJson(statusCheck),\n )\n pr.fileChangeSummary = FileChangeSummary.CreateFromJson(cliPullRequestObject)\n return pr\n }\n}",
"score": 0.7427306175231934
},
{
"filename": "src/run.ts",
"retrieved_chunk": " report.Entries = activeConfigValues\n report.Description = 'Test report'\n report.Id = pullRequestDataModel.id.toString()\n return report\n}\nconst IsConfigValueYes = (configValue: string): boolean => {\n return configValue.trim().toLowerCase() === 'yes'\n}\nexport const run = async (inputsFromWorkflow: ConfigurationInputs): Promise<number> => {\n // take care that action is running only in PR context",
"score": 0.7401059865951538
}
] | typescript | statusCheck.status == 'COMPLETED')
if (statusCheckEvents.length <= 0) { |
/**
* @swagger
* components:
* schemas:
* SignupRequest:
* type: object
* required:
* - email
* - password
* - name
* properties:
* name:
* type: string
* description: The user name
* email:
* type: string
* description: The user email address
* password:
* type: string
* description: The user password
* example:
* name: John Doe
* email: [email protected]
* password: password123
* LoginRequest:
* type: object
* required:
* - email
* - password
* properties:
* email:
* type: string
* description: The user email address
* password:
* type: string
* description: The user password
* example:
* email: [email protected]
* password: password123
*/
import express from 'express';
import { getMe, login, refresh, signup } from '../service';
import { refreshMiddleware } from '../../../middleware/refresh';
import { protect } from '../../../middleware';
const router = express.Router();
/**
* @swagger
* /api/v1/auth/signup:
* post:
* summary: Creates an account
* tags: [Auth]
* requestBody:
* required: true
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/SignupRequest'
* responses:
* "200":
* description: The created user.
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/User'
*/
router.post('/signup', signup);
/**
* @swagger
* /api/v1/auth/login:
* post:
* summary: Login User
* tags: [Auth]
* requestBody:
* required: true
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/LoginRequest'
* responses:
* "200":
* description: The authenticated user.
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/User'
*/
router.post('/login', login);
/**
* @swagger
* /api/v1/auth/refresh:
* post:
* summary: Refreshes the access token
* tags: [Auth]
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* required:
* - refresh
* properties:
* refresh:
* type: string
* description: Refresh token
* example: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjY0NGYwMjg0MWRmNGJlYzliOWI3ZjlhYSIsImlhdCI6MTY4Mjg5OTU4OCwiZXhwIjoxNjgzMDcyMzg4fQ.Bt2kzyxyUEtUy9pLvr0zSzpI8_xTaM6KulO2mwYztbQ
* responses:
* "200":
* description: The new access token
* content:
* application/json:
* schema:
* type: object
* properties:
* accessToken:
* type: string
* description: Access token
* example: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJKb2huIERvZSIsImlhdCI6MTUxNjIzOTAyMn0.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c
* "400":
* description: Invalid request or refresh token is not present
* "401":
* description: Invalid or expired token or refresh token was already used
*/
router.post('/refresh', refreshMiddleware, refresh);
/**
* @swagger
* /api/v1/auth/me:
* post:
* summary: Get user profile
* tags: [Auth]
* security:
* - bearerAuth: []
* responses:
* "200":
* description: The user profile
* "401":
* description: Unauthorized
*/
router.post(' | /me', protect, getMe); |
export default router;
| src/modules/auth/controller/index.ts | walosha-BACKEND_DEV_TESTS-db2fcb4 | [
{
"filename": "src/modules/auth/controller/users.ts",
"retrieved_chunk": " * tags: [User]\n * security:\n * - bearerAuth: []\n * responses:\n * \"200\":\n * description: A list of users\n * content:\n * application/json:\n * schema:\n * type: array",
"score": 0.9147681593894958
},
{
"filename": "src/modules/auth/controller/users.ts",
"retrieved_chunk": " * description: The ID of the user to delete\n * responses:\n * \"204\":\n * description: User deleted successfully\n * \"401\":\n * description: Unauthorized\n * \"404\":\n * description: User not found\n */\n// A simple case where users can only delete themselves not the admin",
"score": 0.9071295857429504
},
{
"filename": "src/modules/account/controller/index.ts",
"retrieved_chunk": " * summary: Transfer funds between accounts\n * security:\n * - BearerAuth: []\n * requestBody:\n * required: true\n * content:\n * application/json:\n * schema:\n * type: object\n * properties:",
"score": 0.8706586360931396
},
{
"filename": "src/modules/auth/controller/users.ts",
"retrieved_chunk": " * summary: Delete a user by ID\n * tags: [User]\n * security:\n * - bearerAuth: []\n * parameters:\n * - in: path\n * name: id\n * schema:\n * type: string\n * required: true",
"score": 0.848433792591095
},
{
"filename": "src/modules/account/controller/index.ts",
"retrieved_chunk": " * email:\n * type: string\n * description: The user email address\n * password:\n * type: string\n * description: The user password\n * example:\n * email: [email protected]\n * password: password123\n */",
"score": 0.8470423221588135
}
] | typescript | /me', protect, getMe); |
/**
* @swagger
* components:
* schemas:
* User:
* type: object
* required:
* - name
* - email
* properties:
* name:
* type: string
* description: The user name
* email:
* type: string
* format: email
* description: The user email address
* password:
* type: string
* description: The user password (hashed)
* role:
* type: string
* enum: [user, admin]
* description: The user role
* default: user
* example:
* name: John Doe
* email: [email protected]
* password: $2a$10$gR06R4K1NM4p4b4ELq.LlOTzq3Dcxj2iPwE5U/O2MDE70o9noemhO
* role: user
*/
import express from 'express';
import { deleteUser, fetchUsers } from '../service';
import { protect, restrictTo } from '../../../middleware';
const router = express.Router();
/**
* @swagger
* /api/v1/users:
* get:
* summary: Retrieve all users
* tags: [User]
* security:
* - bearerAuth: []
* responses:
* "200":
* description: A list of users
* content:
* application/json:
* schema:
* type: array
* items:
* $ref: '#/components/schemas/User'
* "401":
* description: Unauthorized
*/
router.get | ('/', protect, restrictTo('admin'), fetchUsers); |
/**
* @swagger
* /api/v1/users/{id}:
* delete:
* summary: Delete a user by ID
* tags: [User]
* security:
* - bearerAuth: []
* parameters:
* - in: path
* name: id
* schema:
* type: string
* required: true
* description: The ID of the user to delete
* responses:
* "204":
* description: User deleted successfully
* "401":
* description: Unauthorized
* "404":
* description: User not found
*/
// A simple case where users can only delete themselves not the admin
// NOTE(review): this chain omits the `protect` middleware before
// restrictTo('user') — verify restrictTo can determine the requester's role
// without it, otherwise req.user is likely undefined here. Also note the
// handler deletes whatever :id is supplied with no ownership check, so the
// "only delete themselves" claim is not enforced in code — confirm intent.
router.delete('/:id', restrictTo('user'), deleteUser);
export default router;
| src/modules/auth/controller/users.ts | walosha-BACKEND_DEV_TESTS-db2fcb4 | [
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * description: The authenticated user.\n * content:\n * application/json:\n * schema:\n * $ref: '#/components/schemas/User'\n */\nrouter.post('/login', login);\n/**\n * @swagger\n * /api/v1/auth/refresh:",
"score": 0.8951948285102844
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * tags: [Auth]\n * requestBody:\n * required: true\n * content:\n * application/json:\n * schema:\n * $ref: '#/components/schemas/SignupRequest'\n * responses:\n * \"200\":\n * description: The created user.",
"score": 0.8941278457641602
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * summary: Login User\n * tags: [Auth]\n * requestBody:\n * required: true\n * content:\n * application/json:\n * schema:\n * $ref: '#/components/schemas/LoginRequest'\n * responses:\n * \"200\":",
"score": 0.8908945322036743
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * security:\n * - bearerAuth: []\n * responses:\n * \"200\":\n * description: The user profile\n * \"401\":\n * description: Unauthorized\n */\nrouter.post('/me', protect, getMe);\nexport default router;",
"score": 0.8821643590927124
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * \"401\":\n * description: Invalid or expired token or refresh token was already used\n */\nrouter.post('/refresh', refreshMiddleware, refresh);\n/**\n * @swagger\n * /api/v1/auth/me:\n * post:\n * summary: Get user profile\n * tags: [Auth]",
"score": 0.8798539638519287
}
] | typescript | ('/', protect, restrictTo('admin'), fetchUsers); |
// for license and copyright look at the repository
import { IPullRequest } from './Interfaces/PullRequestTypes'
import { IReport, IReportConfigurationEntry } from './Interfaces/ReportTypes'
import { tsMarkdown, table, TableEntry, H1Entry, H3Entry, MarkdownEntry } from 'ts-markdown'
import { ConfigurationCategory, ConfigurationCategoryTitleMap } from './Report.Definitions'
export class ReportGenerator {
DescriptionHeaderLabel = 'Description'
ValueHeaderLabel = 'Value'
public Generate(pr: IPullRequest, report: IReport): string {
const header = this.GenerateHeader(pr, report)
const table = this.GenerateMeasureTable(pr, report)
const reportElements = [header, ...table]
return tsMarkdown(reportElements)
}
public GenerateHeader(pr: IPullRequest, report: IReport): H1Entry {
const title = { h1: `${report.Description} (#${pr.id})` }
return title
}
public GetMeasurementEntries(entries: IReportConfigurationEntry[]): IReportConfigurationEntry[] {
if (entries !== undefined && entries !== null && entries.length > 0) {
return entries.filter((entry) => ConfigurationCategory[entry.Info.ConfigurationCategory].endsWith('Measures'))
}
return []
}
public GenerateMeasureTable(pr: IPullRequest, report: IReport): MarkdownEntry[] {
const tables: MarkdownEntry[] = []
const entries = this.GetMeasurementEntries(report.Entries)
const categories = new Set(entries.map((entry) => entry.Info.ConfigurationCategory))
categories.forEach((category) => {
tables.push(this.GenerateCategoryTitle(category))
tables.push(this.GenerateCategoryTable(pr, report, category))
})
return tables
}
private GenerateCategoryTitle(measureCategory: ConfigurationCategory): H3Entry {
const title = { h3: `${ConfigurationCategoryTitleMap.get(measureCategory) || 'No category'}` }
return title
}
private GenerateCategoryTable(pr: IPullRequest, report: IReport, measureCategory: ConfigurationCategory): TableEntry {
const entries = this.GetMeasurementEntries(report.Entries)
const categoryEntries = entries.filter((entry) => entry.Info.ConfigurationCategory === measureCategory)
categoryEntries.forEach((entry) => {
entry | .Info.Value = entry.PullRequestCallback(pr)
})
const rows = categoryEntries.map((entry) => ({ |
Description: entry.Info.Description,
Value: entry.Info.Value,
}))
return table({
columns: [{ name: this.DescriptionHeaderLabel }, { name: this.ValueHeaderLabel }],
rows: rows,
})
}
}
| src/Report.Generation.ts | philips-software-pull-request-report-action-3390d78 | [
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " GetReviewCount,\n GetCommentCount,\n} from './Report.Functions'\nimport { ConfigurationInputs } from './action.config.type'\nexport const UpdateConfigValues = (\n configValues: ConfigurationInputs,\n measurementEntries: Array<ReportConfigurationEntry>,\n): Array<ReportConfigurationEntry> => {\n // Update measurementEntries with config values from inputs\n measurementEntries.forEach((entry) => {",
"score": 0.8210625648498535
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " // get the property value of inputs\n entry.Info.ConfigValue = (configValues as { [key: string]: string | number })[entry.Info.ConfigurationName]\n })\n return measurementEntries\n}\nexport const GetActiveMeasures = (entries: Array<ReportConfigurationEntry>): Array<ReportConfigurationEntry> => {\n return entries.filter((entry) => entry.Info.ConfigValue === 'yes')\n}\nexport const ReportConfigurationTable = new Array<ReportConfigurationEntry>()\nReportConfigurationTable.push(",
"score": 0.8137810826301575
},
{
"filename": "src/Report.Calculation.ts",
"retrieved_chunk": " })\n return latestCompletion.getTime() - earliestStart.getTime()\n}\nconst FilterReviewsByState = (pullRequest: IPullRequest, state: string) => {\n const eventTimeline = GenerateEventTimeline(pullRequest)\n const reviewEvents = eventTimeline.filter((event) => event.type === 'review')\n if (reviewEvents.length <= 0) {\n return []\n }\n const filteredReviews = reviewEvents.filter((reviewEvent) => {",
"score": 0.7883786559104919
},
{
"filename": "src/run.ts",
"retrieved_chunk": " if (process.env.GITHUB_EVENT_NAME !== 'pull_request') {\n core.setFailed('Action is running outside of PR context')\n return 0\n }\n UpdateConfig(inputsFromWorkflow, ReportConfigurationTable)\n const activeConfigValues = GetActiveMeasures(ReportConfigurationTable)\n // get PR data from github cli\n const cliPullRequestData = await GetPullRequestData(github.context.issue.number)\n const cliPullRequestDataAsString = SanitizeMarkdownComment(JSON.stringify(cliPullRequestData))\n // transform PR data to a typed model",
"score": 0.7709900140762329
},
{
"filename": "src/Report.Calculation.ts",
"retrieved_chunk": "export const GetTimeSpendInPrForLastStatusCheckRun = (pullRequest: IPullRequest) => {\n const eventTimeline = GenerateEventTimeline(pullRequest)\n const statusCheckEvents = eventTimeline\n .filter((event) => event.type === 'statusCheck')\n .map((event) => event.event_instance as StatusCheck)\n .filter((statusCheck) => statusCheck.status == 'COMPLETED')\n if (statusCheckEvents.length <= 0) {\n return 0\n }\n let earliestStart = new Date()",
"score": 0.7681815028190613
}
] | typescript | .Info.Value = entry.PullRequestCallback(pr)
})
const rows = categoryEntries.map((entry) => ({ |
// for license and copyright look at the repository
import { IReport, IReportConfigurationEntry, IReportConfigInfo, PullRequestCallback } from './Interfaces/ReportTypes'
// Section of the report a configuration entry belongs to. Members whose
// names end in 'Measures' are treated as measurement rows by the report
// generator (see ReportGenerator.GetMeasurementEntries).
export enum ConfigurationCategory {
  None,
  StaticMeasures,
  TimeRelatedMeasures,
  StatusCheckRelatedMeasures,
  ReportGeneratorValue,
}
// Human-readable section title per category; rendered as H3 headings by
// ReportGenerator.GenerateCategoryTitle.
export const ConfigurationCategoryTitleMap = new Map<ConfigurationCategory, string>([
  [ConfigurationCategory.None, 'None'],
  [ConfigurationCategory.StaticMeasures, 'Static measures'],
  [ConfigurationCategory.TimeRelatedMeasures, 'Time related measures'],
  [ConfigurationCategory.StatusCheckRelatedMeasures, 'Status check related measures'],
  [ConfigurationCategory.ReportGeneratorValue, 'Report generator related predefined strings'],
])
// Mutable holder for a single report entry: its label, measured value and
// workflow-configuration state.
export class ConfigurationInfo implements IReportConfigInfo {
  // Human-readable label shown in the report's Description column
  public Description
  // Value formatted for display (distinct from the raw Value)
  public PresentationValue
  // Raw measured value; overwritten with the entry's callback result during report generation
  public Value
  // Workflow-input name this entry is configured by — presumably a key of
  // ConfigurationInputs; verify against Report.Measures.ts
  public ConfigurationName
  // Configured value; 'yes' appears to mark a measure as active — TODO confirm
  public ConfigValue
  // Report section this entry belongs to
  public ConfigurationCategory
  constructor(
    label: string,
    presentationValue: string | number,
    value: string | number,
    configName: string,
    defaultConfigValue: string | number,
    configurationCategory: ConfigurationCategory,
  ) {
    this.Description = label
    this.PresentationValue = presentationValue
    this.Value = value
    this.ConfigurationName = configName
    this.ConfigValue = defaultConfigValue
    this.ConfigurationCategory = configurationCategory
  }
}
export class ReportConfigurationEntry implements IReportConfigurationEntry {
public Id
public Info
public | PullRequestCallback: PullRequestCallback
constructor(id = '', info: IReportConfigInfo, measureCallback: PullRequestCallback = () => '') { |
this.Id = id
this.Info = info
this.PullRequestCallback = measureCallback
}
}
// Top-level report model: an identifier, the report title text, and the
// configuration entries to be rendered.
export class Report implements IReport {
  public Id = ''
  public Description = ''
  public Entries: ReportConfigurationEntry[] = []
}
| src/Report.Definitions.ts | philips-software-pull-request-report-action-3390d78 | [
{
"filename": "src/Interfaces/ReportTypes.ts",
"retrieved_chunk": " ConfigValue: string | number\n}\nexport interface IReportConfigurationEntry {\n Id: string\n Info: IReportConfigInfo\n PullRequestCallback: PullRequestCallback\n}\nexport interface IReport {\n Id: string\n Description: string",
"score": 0.9124447703361511
},
{
"filename": "src/Interfaces/ReportTypes.ts",
"retrieved_chunk": "// for license and copyright look at the repository\nimport { ConfigurationCategory } from '../Report.Definitions'\nimport { IPullRequest } from './PullRequestTypes'\nexport type PullRequestCallback = (pr: IPullRequest) => string | number\nexport interface IReportConfigInfo {\n Description: string\n PresentationValue: string | number\n Value: string | number\n ConfigurationName: string\n ConfigurationCategory: ConfigurationCategory",
"score": 0.8887299299240112
},
{
"filename": "src/Report.Generation.ts",
"retrieved_chunk": "// for license and copyright look at the repository\nimport { IPullRequest } from './Interfaces/PullRequestTypes'\nimport { IReport, IReportConfigurationEntry } from './Interfaces/ReportTypes'\nimport { tsMarkdown, table, TableEntry, H1Entry, H3Entry, MarkdownEntry } from 'ts-markdown'\nimport { ConfigurationCategory, ConfigurationCategoryTitleMap } from './Report.Definitions'\nexport class ReportGenerator {\n DescriptionHeaderLabel = 'Description'\n ValueHeaderLabel = 'Value'\n public Generate(pr: IPullRequest, report: IReport): string {\n const header = this.GenerateHeader(pr, report)",
"score": 0.8416622281074524
},
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": "}\nexport class PullRequestCommit implements IPullRequestCommit {\n public authors: ICommitAuthor[] = []\n public committer = ''\n public authorDate = ''\n public commitDate = ''\n public commitHeader = ''\n public commitBody = ''\n public commitId = ''\n public static CreateFromJson(json: unknown): IPullRequestCommit {",
"score": 0.8239749073982239
},
{
"filename": "src/run.ts",
"retrieved_chunk": " report.Entries = activeConfigValues\n report.Description = 'Test report'\n report.Id = pullRequestDataModel.id.toString()\n return report\n}\nconst IsConfigValueYes = (configValue: string): boolean => {\n return configValue.trim().toLowerCase() === 'yes'\n}\nexport const run = async (inputsFromWorkflow: ConfigurationInputs): Promise<number> => {\n // take care that action is running only in PR context",
"score": 0.8145052194595337
}
] | typescript | PullRequestCallback: PullRequestCallback
constructor(id = '', info: IReportConfigInfo, measureCallback: PullRequestCallback = () => '') { |
// for license and copyright look at the repository
import {
IPullRequest,
IPullRequestComment,
IPullRequestCommit,
IPullRequestReview,
} from './Interfaces/PullRequestTypes'
import { EventWithTime } from './Interfaces/ReportTypes'
import { StatusCheck } from './PullRequest.Definitions'
// Builds a time-ordered list of every interesting PR event (creation,
// commits, reviews, status checks, comments, merge, close), each annotated
// with the seconds elapsed since the previous event.
// NOTE(review): a null mergedAt/closedAt coerces to the 1970 epoch and
// survives the filter below — confirm upstream guarantees these are absent
// (undefined) rather than null.
export const GenerateEventTimeline = (pullRequest: IPullRequest): EventWithTime[] => {
  const events: EventWithTime[][] = []
  // merge all interesting events into a single list
  events.push([
    { type: 'createAt', date: new Date(pullRequest.createdAt), event_instance: pullRequest.createdAt, time: 0 },
  ])
  events.push(
    pullRequest.commits.map((commit) => ({
      type: 'commit',
      date: new Date(commit.authorDate),
      event_instance: commit,
      time: 0,
    })),
  )
  events.push(
    pullRequest.reviews.map((review) => ({
      type: 'review',
      date: new Date(review.submittedAt),
      event_instance: review,
      time: 0,
    })),
  )
  events.push(
    pullRequest.statusChecks.map((statusCheck) => ({
      type: 'statusCheck',
      date: new Date(statusCheck.completedAt),
      event_instance: statusCheck,
      time: 0,
    })),
  )
  events.push(
    pullRequest.comments.map((comment) => ({
      type: 'comment',
      date: new Date(comment.createdAt),
      event_instance: comment,
      time: 0,
    })),
  )
  events.push([
    { type: 'mergedAt', date: new Date(pullRequest.mergedAt), event_instance: pullRequest.mergedAt, time: 0 },
  ])
  events.push([
    { type: 'closedAt', date: new Date(pullRequest.closedAt), event_instance: pullRequest.closedAt, time: 0 },
  ])
  // flatten the list
  const flattenedEvents = events.flat()
  // Drop events without a usable timestamp. A missing/garbled date string
  // yields an Invalid Date whose getTime() is NaN, which would poison the
  // sort comparator and the per-event durations below. (The previous check
  // `event.date !== null` could never fire: `new Date(...)` never returns null.)
  const filteredEvents = flattenedEvents.filter((event) => !isNaN(event.date.getTime()))
  // sort the events by date, ascending
  filteredEvents.sort((a, b) => a.date.getTime() - b.date.getTime())
  // annotate each event with the seconds elapsed since its predecessor
  return filteredEvents.map((event, index) => ({
    type: event.type,
    date: event.date,
    time: index === 0 ? 0 : (event.date.getTime() - filteredEvents[index - 1].date.getTime()) / 1000,
    event_instance: event.event_instance,
  }))
}
// Formats a millisecond duration using the largest unit that keeps the
// magnitude readable (seconds < 60, minutes < 60, hours < 24, else days),
// each rounded to one decimal place.
export const MillisecondsToReadableDuration = (leadTimeInMSec: number) => {
  const MSEC_PER_SEC = 1000
  const seconds = +(leadTimeInMSec / MSEC_PER_SEC).toFixed(1)
  if (seconds < 60) return `${seconds} Sec`
  const minutes = +(leadTimeInMSec / (MSEC_PER_SEC * 60)).toFixed(1)
  if (minutes < 60) return `${minutes} Min`
  const hours = +(leadTimeInMSec / (MSEC_PER_SEC * 60 * 60)).toFixed(1)
  if (hours < 24) return `${hours} Hours`
  const days = +(leadTimeInMSec / (MSEC_PER_SEC * 60 * 60 * 24)).toFixed(1)
  return `${days} Days`
}
// Prefer the merge timestamp; fall back to the close timestamp for PRs
// that were closed without being merged.
export const GetMergedOrClosedDate = (pullRequest: IPullRequest): string => {
  return pullRequest.mergedAt ?? pullRequest.closedAt
}
export const GetLeadTimeForPullRequest = (pullRequest: IPullRequest) => {
// parse createAt as date from string
const createAt = new Date(pullRequest.createdAt)
const mergedOrClosedAt = new Date(GetMergedOrClosedDate(pullRequest))
const duration = mergedOrClosedAt.getTime() - createAt.getTime()
if (duration <= 0 || isNaN(duration)) return 0
return duration
}
// Time (ms) between the first commit on the branch and the PR being opened;
// 0 when either event is missing or the order is inconsistent.
export const GetTimeSpendOnBranchBeforePRCreated = (pullRequest: IPullRequest) => {
  const timeline = GenerateEventTimeline(pullRequest)
  const createdEvent = timeline.find((event) => event.type === 'createAt')
  const firstCommit = timeline.find((event) => event.type === 'commit')
  if (!createdEvent || !firstCommit) return 0
  const duration = createdEvent.date.getTime() - firstCommit.date.getTime()
  return duration > 0 && !isNaN(duration) ? duration : 0
}
// Time (ms) between the first commit on the branch and the merge;
// -1 when either event is missing or the order is inconsistent.
export const GetTimeSpendOnBranchBeforePRMerged = (pullRequest: IPullRequest) => {
  const timeline = GenerateEventTimeline(pullRequest)
  const mergedEvent = timeline.find((event) => event.type === 'mergedAt')
  const firstCommit = timeline.find((event) => event.type === 'commit')
  if (!mergedEvent || !firstCommit) return -1
  const mergedMs = mergedEvent.date.getTime()
  const firstCommitMs = firstCommit.date.getTime()
  return mergedMs > firstCommitMs ? mergedMs - firstCommitMs : -1
}
// Time (ms) between the last review and the merge; -1 when there are no
// reviews, no merge event, or the order is inconsistent.
export const GetTimeToMergeAfterLastReview = (pullRequest: IPullRequest) => {
  const timeline = GenerateEventTimeline(pullRequest)
  const reviews = timeline.filter((event) => event.type === 'review')
  if (reviews.length === 0) {
    return -1
  }
  // last element without mutating the array (the original used reverse()[0])
  const lastReview = reviews[reviews.length - 1]
  const mergedEvent = timeline.find((event) => event.type === 'mergedAt')
  if (mergedEvent && mergedEvent.date.getTime() > lastReview.date.getTime()) {
    return mergedEvent.date.getTime() - lastReview.date.getTime()
  }
  return -1
}
export const GetTotalRuntimeForLastStatusCheckRun = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const statusCheckEvents = eventTimeline
.filter((event) => event.type === 'statusCheck')
.map((event) => event.event_instance as StatusCheck)
.filter((statusCheck) => statusCheck.status == 'COMPLETED')
if (statusCheckEvents.length <= 0) {
return 0
}
let totalTime = 0
statusCheckEvents.forEach((statusCheck) => {
totalTime | += new Date(statusCheck.completedAt).getTime() - new Date(statusCheck.startedAt).getTime()
})
return totalTime
} |
// Wall-clock span (ms) from the earliest start to the latest completion
// across all COMPLETED status checks; 0 when there are none.
export const GetTimeSpendInPrForLastStatusCheckRun = (pullRequest: IPullRequest) => {
  const completedChecks = GenerateEventTimeline(pullRequest)
    .filter((event) => event.type === 'statusCheck')
    .map((event) => event.event_instance as StatusCheck)
    .filter((statusCheck) => statusCheck.status === 'COMPLETED')
  if (completedChecks.length <= 0) {
    return 0
  }
  // Sentinels mirror the original accumulator seeds: "now" bounds the
  // earliest start from above, a far-past date bounds the latest completion
  // from below.
  const startTimes = completedChecks.map((check) => new Date(check.startedAt).getTime())
  const completionTimes = completedChecks.map((check) => new Date(check.completedAt).getTime())
  const earliestStart = Math.min(Date.now(), ...startTimes)
  const latestCompletion = Math.max(new Date(0, 0, 0).getTime(), ...completionTimes)
  return latestCompletion - earliestStart
}
// All review events on the timeline whose review state matches `state`.
const FilterReviewsByState = (pullRequest: IPullRequest, state: string) => {
  const reviewEvents = GenerateEventTimeline(pullRequest).filter((event) => event.type === 'review')
  // filtering an empty list yields [], so no explicit early return is needed
  return reviewEvents.filter(
    (reviewEvent) => (reviewEvent.event_instance as IPullRequestReview).state === state,
  )
}
// Count of reviews that were pure comments (state COMMENTED).
export const GetNumberOfCommentOnlyReviews = (pullRequest: IPullRequest) =>
  FilterReviewsByState(pullRequest, 'COMMENTED').length

// Count of reviews that requested changes (state CHANGES_REQUESTED).
export const GetNumberOfRequestedChangeReviews = (pullRequest: IPullRequest) =>
  FilterReviewsByState(pullRequest, 'CHANGES_REQUESTED').length

// Count of approving reviews (state APPROVED).
export const GetNumberOfApprovedReviews = (pullRequest: IPullRequest) =>
  FilterReviewsByState(pullRequest, 'APPROVED').length
// Distinct author logins across all review events, in first-seen order.
export const GetUniqueReviewParticipants = (pullRequest: IPullRequest) => {
  const reviewerLogins = GenerateEventTimeline(pullRequest)
    .filter((event) => event.type === 'review')
    .map((event) => (event.event_instance as IPullRequestReview).authorLogin)
  return [...new Set(reviewerLogins)]
}
// Distinct author logins across all comment events, in first-seen order.
export const GetUniqueCommentParticipants = (pullRequest: IPullRequest) => {
  const commenterLogins = GenerateEventTimeline(pullRequest)
    .filter((event) => event.type === 'comment')
    .map((event) => (event.event_instance as IPullRequestComment).authorLogin)
  return [...new Set(commenterLogins)]
}
// Distinct commit-author logins across all commits (authors without a
// login are skipped), in first-seen order.
export const GetUniqueCommitterParticipants = (pullRequest: IPullRequest) => {
  const committerLogins = GenerateEventTimeline(pullRequest)
    .filter((event) => event.type === 'commit')
    .flatMap((event) =>
      (event.event_instance as IPullRequestCommit).authors
        .filter((author) => author.login !== null)
        .map((author) => author.login),
    )
  return [...new Set(committerLogins)]
}
// Reviewers plus commenters, deduplicated across both roles.
export const GetNumberOfActivePullRequestReviewParticipants = (pullRequest: IPullRequest) => {
  const participants = [
    ...GetUniqueReviewParticipants(pullRequest),
    ...GetUniqueCommentParticipants(pullRequest),
  ]
  return new Set(participants).size
}

// Number of distinct commit authors on the PR.
export const GetNumberOfPullRequestCommitter = (pullRequest: IPullRequest) => {
  return GetUniqueCommitterParticipants(pullRequest).length
}

// Sum of active reviewers/commenters and committers. Note the two groups are
// deduplicated independently, so a person who both commits and reviews is
// counted once in each group.
export const GetTotalNumberOfParticipants = (pullRequest: IPullRequest) => {
  return GetNumberOfActivePullRequestReviewParticipants(pullRequest) + GetNumberOfPullRequestCommitter(pullRequest)
}
| src/Report.Calculation.ts | philips-software-pull-request-report-action-3390d78 | [
{
"filename": "src/Report.Generation.ts",
"retrieved_chunk": " const categoryEntries = entries.filter((entry) => entry.Info.ConfigurationCategory === measureCategory)\n categoryEntries.forEach((entry) => {\n entry.Info.Value = entry.PullRequestCallback(pr)\n })\n const rows = categoryEntries.map((entry) => ({\n Description: entry.Info.Description,\n Value: entry.Info.Value,\n }))\n return table({\n columns: [{ name: this.DescriptionHeaderLabel }, { name: this.ValueHeaderLabel }],",
"score": 0.6700834035873413
},
{
"filename": "src/Report.Generation.ts",
"retrieved_chunk": " return entries.filter((entry) => ConfigurationCategory[entry.Info.ConfigurationCategory].endsWith('Measures'))\n }\n return []\n }\n public GenerateMeasureTable(pr: IPullRequest, report: IReport): MarkdownEntry[] {\n const tables: MarkdownEntry[] = []\n const entries = this.GetMeasurementEntries(report.Entries)\n const categories = new Set(entries.map((entry) => entry.Info.ConfigurationCategory))\n categories.forEach((category) => {\n tables.push(this.GenerateCategoryTitle(category))",
"score": 0.663184642791748
},
{
"filename": "src/Report.Generation.ts",
"retrieved_chunk": " const table = this.GenerateMeasureTable(pr, report)\n const reportElements = [header, ...table]\n return tsMarkdown(reportElements)\n }\n public GenerateHeader(pr: IPullRequest, report: IReport): H1Entry {\n const title = { h1: `${report.Description} (#${pr.id})` }\n return title\n }\n public GetMeasurementEntries(entries: IReportConfigurationEntry[]): IReportConfigurationEntry[] {\n if (entries !== undefined && entries !== null && entries.length > 0) {",
"score": 0.6621592044830322
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " // get the property value of inputs\n entry.Info.ConfigValue = (configValues as { [key: string]: string | number })[entry.Info.ConfigurationName]\n })\n return measurementEntries\n}\nexport const GetActiveMeasures = (entries: Array<ReportConfigurationEntry>): Array<ReportConfigurationEntry> => {\n return entries.filter((entry) => entry.Info.ConfigValue === 'yes')\n}\nexport const ReportConfigurationTable = new Array<ReportConfigurationEntry>()\nReportConfigurationTable.push(",
"score": 0.6603893041610718
},
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": " startedAt: string\n completedAt: string\n conclusion: string\n status: string\n name: string\n detailsUrl: string\n }\n const statusCheck = new StatusCheck()\n statusCheck.workflowName = jsonObject['workflowName']\n statusCheck.startedAt = jsonObject['startedAt']",
"score": 0.6530741453170776
}
] | typescript | += new Date(statusCheck.completedAt).getTime() - new Date(statusCheck.startedAt).getTime()
})
return totalTime
} |
import { sign } from 'jsonwebtoken';
import { IUser } from '../types';
import { Request, Response } from 'express';
import User from '../model';
import { AppError } from '../../../utils/appError';
import { catchAsync } from '../../../utils/catchAsync';
import redisService from '../../../utils/redis';
// Builds a signed access JWT for the given user, carrying id/name/email/role
// plus the access token-type marker from the environment.
const accessToken = (user: { _id: string; name: string; email: string; role: string }) => {
  const payload = { id: user._id, name: user.name, email: user.email, type: process.env.JWT_ACCESS, role: user.role };
  const options = {
    subject: user.email,
    expiresIn: process.env.JWT_EXPIRES_IN,
    audience: process.env.JWT_AUDIENCE,
    issuer: process.env.JWT_ISSUER,
  };
  return sign(payload, process.env.JWT_KEY_SECRET as string, options);
};
// Builds a signed refresh JWT for the given user; identical claims to the
// access token apart from the token-type marker and signing key.
const refreshToken = (user: { _id: string; name: string; email: string; role: string }) => {
  const payload = { id: user._id, name: user.name, email: user.email, type: process.env.JWT_REFRESH, role: user.role };
  const options = {
    subject: user.email,
    expiresIn: process.env.JWT_EXPIRES_IN,
    audience: process.env.JWT_AUDIENCE,
    issuer: process.env.JWT_ISSUER,
  };
  return sign(payload, process.env.JWT_KEY_REFRESH as string, options);
};
// Issues both tokens for `user` and responds with them plus a trimmed-down
// user payload (only name/email/role are echoed back, so the password never
// leaves the server). NOTE(review): the response field is spelled `acess`
// (sic) — it is part of the wire format, so renaming it would break clients.
const createSendToken = (user: IUser, statusCode: number, req: Request, res: Response) => {
  const acess = accessToken(user);
  const refresh = refreshToken(user);
  // Remove password from output
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  const { name, email, role, ...otherUserData } = user;
  res.status(statusCode).json({
    status: 'success',
    acess,
    refresh,
    data: {
      name,
      email,
      role,
    },
  });
};
// POST /signup — registers a new user and immediately issues token pair + profile.
export const signup = catchAsync(async (req, res) => {
  const { name, email, password } = req.body;
  const newUser = await User.create({ name, email, password });
  createSendToken(newUser, 201, req, res);
});
export const login = catchAsync(async | (req, res, next) => { |
const { email, password } = req.body;
// 1) Check if email and password exist
if (!email || !password) {
return next(new AppError('Please provide email and password!', 400));
}
// 2) Check if user exists && password is correct
const user: any = await User.findOne({ email }).select('+password');
if (!user || !(await user.correctPassword(password, user.password))) {
return next(new AppError('Incorrect email or password', 401));
}
// 3) If everything ok, send token to client
createSendToken(user, 200, req, res);
});
// POST /me — returns the authenticated user's profile. `req.user` is
// populated by the `protect` middleware on this route.
export const getMe = catchAsync(async (req, res) => {
  const user = req.user;
  // Fixed typo in the response message ('sucessfully' -> 'successfully');
  // the stale "send token to client" comment was also wrong — this sends
  // the profile, not a token.
  res.status(200).json({ message: 'user successfully fetched!', user });
});
// Logs the client out by overwriting the jwt cookie with a short-lived
// dummy value (expires 10 seconds from now).
export function logout(req: Request, res: Response) {
  const tenSecondsFromNow = new Date(Date.now() + 10 * 1000);
  res.cookie('jwt', 'loggedout', { expires: tenSecondsFromNow, httpOnly: true });
  res.status(200).json({ status: 'success' });
}
// POST /refresh — marks the presented refresh token as used in Redis
// (with a TTL) and issues a fresh refresh token.
export async function refresh(req: Request, res: Response) {
  const user: any = req.user;
  // parseInt('') yields NaN when JWT_REFRESH_TIME is unset, which Redis
  // rejects as an EX value — fail fast with a clear 500 instead.
  const ttlSeconds = parseInt(process.env.JWT_REFRESH_TIME || '', 10);
  if (Number.isNaN(ttlSeconds)) {
    return res.status(500).json({ status: 'error', message: 'JWT_REFRESH_TIME is not configured' });
  }
  await redisService.set({
    key: user?.token,
    value: '1',
    timeType: 'EX',
    time: ttlSeconds,
  });
  const refresh = refreshToken(user);
  // 'sucess' -> 'success' for consistency with every other handler's status field
  return res.status(200).json({ status: 'success', refresh });
}
// GET /users — returns every user document. Admin-only access is enforced
// by the route middleware, not here.
export async function fetchUsers(req: Request, res: Response) {
  try {
    const users = await User.find();
    return res.status(200).json({ message: 'successfully fetched users', data: users });
  } catch (error: unknown) {
    // Previously an AppError was constructed (with status 201) but never
    // thrown or sent, leaving the request hanging with no response.
    const message = error instanceof Error ? error.message : 'failed to fetch users';
    return res.status(500).json({ status: 'error', message });
  }
}
// DELETE /users/:id — removes the user with the given id.
// NOTE(review): responds 200 even when no document matched — consider
// checking deletedCount and returning 404.
export async function deleteUser(req: Request, res: Response) {
  const id = req.params.id;
  try {
    await User.deleteOne({ _id: id });
    return res.status(200).json({ message: 'successfully deleted user' });
  } catch (error: unknown) {
    // Previously an AppError was constructed but never thrown or sent,
    // leaving the request hanging with no response.
    const message = error instanceof Error ? error.message : 'failed to delete user';
    return res.status(500).json({ status: 'error', message });
  }
}
| src/modules/auth/service/index.ts | walosha-BACKEND_DEV_TESTS-db2fcb4 | [
{
"filename": "src/middleware/protect.ts",
"retrieved_chunk": "import { NextFunction, Request, Response } from 'express';\nimport { JwtPayload, verify } from 'jsonwebtoken';\nimport { AppError } from '../utils/appError';\nimport { catchAsync } from '../utils/catchAsync';\nimport User from '../modules/auth/model';\nexport const protect = catchAsync(async (req: Request, res: Response, next: NextFunction) => {\n // 1) Getting token and check of it's there\n let token;\n if (req.headers.authorization && req.headers.authorization.startsWith('Bearer')) {\n token = req.headers.authorization.split(' ')[1];",
"score": 0.8133342266082764
},
{
"filename": "src/middleware/refresh.ts",
"retrieved_chunk": "import jwt, { JwtPayload } from 'jsonwebtoken';\nimport redisService from '../utils/redis';\nimport { AppError } from '../utils/appError';\nimport { NextFunction, Request, Response } from 'express';\nexport const refreshMiddleware: any = async (req: Request, res: Response, next: NextFunction) => {\n if (req.body?.refresh) {\n const token = req.body.refresh;\n try {\n const decoded: any = jwt.verify(token, process.env.JWT_KEY_REFRESH as string) as JwtPayload;\n if (",
"score": 0.7976261377334595
},
{
"filename": "src/modules/account/service/index.ts",
"retrieved_chunk": "import { Request, Response } from 'express';\nimport Account from '../model';\nexport const transferFund = async (req: Request, res: Response) => {\n const { fromAccountId, toAccountId, amount } = req.body;\n try {\n let srcAccount: any = await Account.findById(fromAccountId);\n let destAccount: any = await Account.findById(toAccountId);\n if (String(srcAccount.user) == String(destAccount.user)) {\n return res.status(400).json({\n error: 'Cannot transfer to own acccount',",
"score": 0.7975401282310486
},
{
"filename": "src/middleware/isLoggedIn.ts",
"retrieved_chunk": "/* eslint-disable @typescript-eslint/no-explicit-any */\nimport { NextFunction, Request, Response } from 'express';\nimport jwt from 'jsonwebtoken';\nimport User from '../modules/auth/model';\n// Only for rendered pages, no errors!\nexport async function isLoggedIn(req: Request, res: Response, next: NextFunction) {\n if (req.cookies.jwt) {\n try {\n // 1) verify token\n const decoded: any = await jwt.verify(req.cookies.jwt, process.env.JWT_KEY_SECRET as string);",
"score": 0.7974755764007568
},
{
"filename": "src/middleware/refresh.ts",
"retrieved_chunk": " req.user = {\n email: decoded.email,\n name: decoded.name,\n role: decoded.role,\n token,\n };\n next();\n return;\n } catch (err) {\n console.log({ err });",
"score": 0.793700635433197
}
] | typescript | (req, res, next) => { |
/**
* @swagger
* components:
* schemas:
* User:
* type: object
* required:
* - name
* - email
* properties:
* name:
* type: string
* description: The user name
* email:
* type: string
* format: email
* description: The user email address
* password:
* type: string
* description: The user password (hashed)
* role:
* type: string
* enum: [user, admin]
* description: The user role
* default: user
* example:
* name: John Doe
* email: [email protected]
* password: $2a$10$gR06R4K1NM4p4b4ELq.LlOTzq3Dcxj2iPwE5U/O2MDE70o9noemhO
* role: user
*/
import express from 'express';
import { deleteUser, fetchUsers } from '../service';
import { protect, restrictTo } from '../../../middleware';
const router = express.Router();
/**
* @swagger
* /api/v1/users:
* get:
* summary: Retrieve all users
* tags: [User]
* security:
* - bearerAuth: []
* responses:
* "200":
* description: A list of users
* content:
* application/json:
* schema:
* type: array
* items:
* $ref: '#/components/schemas/User'
* "401":
* description: Unauthorized
*/
router.get('/', protect, restrictTo('admin'), fetchUsers);
/**
* @swagger
* /api/v1/users/{id}:
* delete:
* summary: Delete a user by ID
* tags: [User]
* security:
* - bearerAuth: []
* parameters:
* - in: path
* name: id
* schema:
* type: string
* required: true
* description: The ID of the user to delete
* responses:
* "204":
* description: User deleted successfully
* "401":
* description: Unauthorized
* "404":
* description: User not found
*/
// A simple case where users can only delete themselves not the admin
router | .delete('/:id', restrictTo('user'), deleteUser); |
export default router;
| src/modules/auth/controller/users.ts | walosha-BACKEND_DEV_TESTS-db2fcb4 | [
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * security:\n * - bearerAuth: []\n * responses:\n * \"200\":\n * description: The user profile\n * \"401\":\n * description: Unauthorized\n */\nrouter.post('/me', protect, getMe);\nexport default router;",
"score": 0.9125770330429077
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * tags: [Auth]\n * requestBody:\n * required: true\n * content:\n * application/json:\n * schema:\n * $ref: '#/components/schemas/SignupRequest'\n * responses:\n * \"200\":\n * description: The created user.",
"score": 0.8339521884918213
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * \"401\":\n * description: Invalid or expired token or refresh token was already used\n */\nrouter.post('/refresh', refreshMiddleware, refresh);\n/**\n * @swagger\n * /api/v1/auth/me:\n * post:\n * summary: Get user profile\n * tags: [Auth]",
"score": 0.8334336280822754
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * email:\n * type: string\n * description: The user email address\n * password:\n * type: string\n * description: The user password\n * example:\n * email: [email protected]\n * password: password123\n */",
"score": 0.8279905319213867
},
{
"filename": "src/modules/account/controller/index.ts",
"retrieved_chunk": " * description: The amount of funds to transfer.\n * example: 1000.00\n * tag:\n * type: string\n * description: The tag associated with the transfer.\n * example: \"Rent payment\"\n * responses:\n * '200':\n * description: Successful transfer of funds\n * '400':",
"score": 0.8279760479927063
}
] | typescript | .delete('/:id', restrictTo('user'), deleteUser); |
import { sign } from 'jsonwebtoken';
import { IUser } from '../types';
import { Request, Response } from 'express';
import User from '../model';
import { AppError } from '../../../utils/appError';
import { catchAsync } from '../../../utils/catchAsync';
import redisService from '../../../utils/redis';
const accessToken = (user: { _id: string; name: string; email: string; role: string }) => {
return sign(
{ id: user._id, name: user.name, email: user.email, type: process.env.JWT_ACCESS, role: user.role },
process.env.JWT_KEY_SECRET as string,
{
subject: user.email,
expiresIn: process.env.JWT_EXPIRES_IN,
audience: process.env.JWT_AUDIENCE,
issuer: process.env.JWT_ISSUER,
},
);
};
const refreshToken = (user: { _id: string; name: string; email: string; role: string }) => {
return sign(
{ id: user._id, name: user.name, email: user.email, type: process.env.JWT_REFRESH, role: user.role },
process.env.JWT_KEY_REFRESH as string,
{
subject: user.email,
expiresIn: process.env.JWT_EXPIRES_IN,
audience: process.env.JWT_AUDIENCE,
issuer: process.env.JWT_ISSUER,
},
);
};
const createSendToken = (user: IUser, statusCode: number, req: Request, res: Response) => {
const acess = accessToken(user);
const refresh = refreshToken(user);
// Remove password from output
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { name, email, role, ...otherUserData } = user;
res.status(statusCode).json({
status: 'success',
acess,
refresh,
data: {
name,
email,
role,
},
});
};
export const signup | = catchAsync(async (req, res) => { |
const newUser = await User.create({
name: req.body.name,
email: req.body.email,
password: req.body.password,
});
createSendToken(newUser, 201, req, res);
});
export const login = catchAsync(async (req, res, next) => {
const { email, password } = req.body;
// 1) Check if email and password exist
if (!email || !password) {
return next(new AppError('Please provide email and password!', 400));
}
// 2) Check if user exists && password is correct
const user: any = await User.findOne({ email }).select('+password');
if (!user || !(await user.correctPassword(password, user.password))) {
return next(new AppError('Incorrect email or password', 401));
}
// 3) If everything ok, send token to client
createSendToken(user, 200, req, res);
});
export const getMe = catchAsync(async (req, res) => {
const user = req.user;
// 3) If everything ok, send token to client
res.status(200).json({ message: 'user sucessfully fetched!', user });
});
export function logout(req: Request, res: Response) {
res.cookie('jwt', 'loggedout', {
expires: new Date(Date.now() + 10 * 1000),
httpOnly: true,
});
res.status(200).json({ status: 'success' });
}
export async function refresh(req: Request, res: Response) {
const user: any = req.user;
await redisService.set({
key: user?.token,
value: '1',
timeType: 'EX',
time: parseInt(process.env.JWT_REFRESH_TIME || '', 10),
});
const refresh = refreshToken(user);
return res.status(200).json({ status: 'sucess', refresh });
}
export async function fetchUsers(req: Request, res: Response) {
const body = req.body;
console.log({ body });
try {
const users = await User.find();
return res.status(200).json({ message: 'sucessfully fetch users', data: users });
} catch (error: any) {
new AppError(error.message, 201);
}
}
export async function deleteUser(req: Request, res: Response) {
const id = req.params.id;
try {
await User.deleteOne({ _id: id });
return res.status(200).json({ message: 'sucessfully deleted users' });
} catch (error: any) {
new AppError(error.message, 201);
}
}
| src/modules/auth/service/index.ts | walosha-BACKEND_DEV_TESTS-db2fcb4 | [
{
"filename": "src/middleware/refresh.ts",
"retrieved_chunk": " req.user = {\n email: decoded.email,\n name: decoded.name,\n role: decoded.role,\n token,\n };\n next();\n return;\n } catch (err) {\n console.log({ err });",
"score": 0.8695902824401855
},
{
"filename": "src/constant/swaggerOptions.ts",
"retrieved_chunk": " },\n },\n },\n security: [{ bearerAuth: [] }],\n servers: [\n {\n url: 'http://localhost:3000',\n description: 'Development server',\n },\n ],",
"score": 0.8200947046279907
},
{
"filename": "src/modules/account/model/index.ts",
"retrieved_chunk": "import mongoose, { Document, Model, Schema, model } from 'mongoose';\ninterface AccountAttrs {\n id: string;\n user: string;\n accountType: any;\n balance: number;\n}\ninterface IAccount extends Document {\n id: string;\n user: string;",
"score": 0.8051189184188843
},
{
"filename": "src/modules/auth/model/index.ts",
"retrieved_chunk": " password: string;\n}\nconst userSchema = new Schema<IUser>({\n name: {\n type: String,\n required: [true, 'Please tell us your name!'],\n },\n email: {\n type: String,\n required: [true, 'Please provide your email'],",
"score": 0.7972834706306458
},
{
"filename": "src/modules/account/model/index.ts",
"retrieved_chunk": " },\n user: {\n type: mongoose.Types.ObjectId,\n ref: 'User',\n },\n balance: {\n type: Number,\n default: 0,\n },\n});",
"score": 0.7945507764816284
}
] | typescript | = catchAsync(async (req, res) => { |
/**
* @swagger
* components:
* schemas:
* SignupRequest:
* type: object
* required:
* - email
* - password
* - name
* properties:
* name:
* type: string
* description: The user name
* email:
* type: string
* description: The user email address
* password:
* type: string
* description: The user password
* example:
* name: John Doe
* email: [email protected]
* password: password123
* LoginRequest:
* type: object
* required:
* - email
* - password
* properties:
* email:
* type: string
* description: The user email address
* password:
* type: string
* description: The user password
* example:
* email: [email protected]
* password: password123
*/
import express from 'express';
import { getMe, login, refresh, signup } from '../service';
import { refreshMiddleware } from '../../../middleware/refresh';
import { protect } from '../../../middleware';
const router = express.Router();
/**
* @swagger
* /api/v1/auth/signup:
* post:
* summary: Creates an account
* tags: [Auth]
* requestBody:
* required: true
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/SignupRequest'
* responses:
* "200":
* description: The created user.
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/User'
*/
router.post | ('/signup', signup); |
/**
* @swagger
* /api/v1/auth/login:
* post:
* summary: Login User
* tags: [Auth]
* requestBody:
* required: true
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/LoginRequest'
* responses:
* "200":
* description: The authenticated user.
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/User'
*/
router.post('/login', login);
/**
* @swagger
* /api/v1/auth/refresh:
* post:
* summary: Refreshes the access token
* tags: [Auth]
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* required:
* - refresh
* properties:
* refresh:
* type: string
* description: Refresh token
* example: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjY0NGYwMjg0MWRmNGJlYzliOWI3ZjlhYSIsImlhdCI6MTY4Mjg5OTU4OCwiZXhwIjoxNjgzMDcyMzg4fQ.Bt2kzyxyUEtUy9pLvr0zSzpI8_xTaM6KulO2mwYztbQ
* responses:
* "200":
* description: The new access token
* content:
* application/json:
* schema:
* type: object
* properties:
* accessToken:
* type: string
* description: Access token
* example: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJKb2huIERvZSIsImlhdCI6MTUxNjIzOTAyMn0.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c
* "400":
* description: Invalid request or refresh token is not present
* "401":
* description: Invalid or expired token or refresh token was already used
*/
router.post('/refresh', refreshMiddleware, refresh);
/**
* @swagger
* /api/v1/auth/me:
* post:
* summary: Get user profile
* tags: [Auth]
* security:
* - bearerAuth: []
* responses:
* "200":
* description: The user profile
* "401":
* description: Unauthorized
*/
router.post('/me', protect, getMe);
export default router;
| src/modules/auth/controller/index.ts | walosha-BACKEND_DEV_TESTS-db2fcb4 | [
{
"filename": "src/modules/auth/controller/users.ts",
"retrieved_chunk": " * tags: [User]\n * security:\n * - bearerAuth: []\n * responses:\n * \"200\":\n * description: A list of users\n * content:\n * application/json:\n * schema:\n * type: array",
"score": 0.8877625465393066
},
{
"filename": "src/modules/account/controller/index.ts",
"retrieved_chunk": " * summary: Transfer funds between accounts\n * security:\n * - BearerAuth: []\n * requestBody:\n * required: true\n * content:\n * application/json:\n * schema:\n * type: object\n * properties:",
"score": 0.8323175311088562
},
{
"filename": "src/modules/auth/controller/users.ts",
"retrieved_chunk": "/**\n * @swagger\n * components:\n * schemas:\n * User:\n * type: object\n * required:\n * - name\n * - email\n * properties:",
"score": 0.8264278769493103
},
{
"filename": "src/modules/auth/controller/users.ts",
"retrieved_chunk": " * items:\n * $ref: '#/components/schemas/User'\n * \"401\":\n * description: Unauthorized\n */\nrouter.get('/', protect, restrictTo('admin'), fetchUsers);\n/**\n * @swagger\n * /api/v1/users/{id}:\n * delete:",
"score": 0.8234728574752808
},
{
"filename": "src/modules/account/controller/index.ts",
"retrieved_chunk": "/**\n * @swagger\n * components:\n * schemas:\n * SignupRequest:\n * type: object\n * required:\n * - email\n * - password\n * - name",
"score": 0.8227013349533081
}
] | typescript | ('/signup', signup); |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.